diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 120000 index 00000000000..c67157dc4ab --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1 @@ +../.mcp.json \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 946f2bad604..ffbb41a421a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -83,6 +83,21 @@ jobs: working-directory: apps/desktop run: bun run install:deps + # `bun install --ignore-scripts` skips postinstalls for safety; the + # @vscode/ripgrep package uses its postinstall to download the + # platform-specific ripgrep binary. workspace-fs tests that exercise + # the streaming / multiline paths need that binary, so run the + # postinstall explicitly for just this package. + - name: Download bundled ripgrep binary + run: | + rg_pkg=$(ls -d node_modules/.bun/@vscode+ripgrep@*/node_modules/@vscode/ripgrep | head -1) + if [ -z "$rg_pkg" ]; then + echo "::error::@vscode/ripgrep not found in node_modules" + exit 1 + fi + node "$rg_pkg/lib/postinstall.js" + ls -la "$rg_pkg/bin" || true + - name: Test env: RELAY_URL: https://relay.superset.sh diff --git a/.github/workflows/cleanup-preview.yml b/.github/workflows/cleanup-preview.yml index 9f87b92173f..2af3e565286 100644 --- a/.github/workflows/cleanup-preview.yml +++ b/.github/workflows/cleanup-preview.yml @@ -22,6 +22,17 @@ jobs: branch: ${{ github.event.pull_request.head.ref }} api_key: ${{ secrets.NEON_API_KEY }} + - name: Setup Fly CLI + uses: superfly/flyctl-actions/setup-flyctl@ed8efb33836e8b2096c7fd3ba1c8afe303ebbff1 # 1.6 + + - name: Delete Electric Fly.io app + id: electric-cleanup + continue-on-error: true + env: + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + run: | + flyctl apps destroy "superset-electric-pr-${{ github.event.pull_request.number }}" --yes + - name: Update comment if: always() uses: thollander/actions-comment-pull-request@24bffb9b452ba05a4f3f77933840a6a841d1b32b # v3.0.1 @@ -31,6 +42,7 @@ jobs: The following preview 
resources have been cleaned up: - ${{ steps.neon-cleanup.outcome == 'success' && '✅' || '⚠️' }} Neon database branch + - ${{ steps.electric-cleanup.outcome == 'success' && '✅' || '⚠️' }} Electric Fly.io app Thank you for your contribution! 🎉 comment-tag: "🚀-preview-deployment" diff --git a/.github/workflows/deploy-production.yml b/.github/workflows/deploy-production.yml index 23efd918d66..ced6ac2d246 100644 --- a/.github/workflows/deploy-production.yml +++ b/.github/workflows/deploy-production.yml @@ -442,6 +442,33 @@ jobs: --env SECRETS_ENCRYPTION_KEY=$SECRETS_ENCRYPTION_KEY \ --env ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY + deploy-electric: + name: Deploy Electric to Fly.io + runs-on: ubuntu-latest + environment: production + + steps: + - name: Checkout repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1 + + - name: Setup Fly CLI + uses: superfly/flyctl-actions/setup-flyctl@ed8efb33836e8b2096c7fd3ba1c8afe303ebbff1 # 1.6 + + - name: Stage secrets + env: + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + run: | + flyctl secrets set \ + DATABASE_URL="${{ secrets.DATABASE_URL_UNPOOLED }}" \ + ELECTRIC_SECRET="${{ secrets.ELECTRIC_SECRET }}" \ + --app superset-electric \ + --stage + + - name: Deploy to Fly.io + env: + FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} + run: flyctl deploy . 
--config fly.toml --remote-only + deploy-electric-proxy: name: Deploy Electric Proxy to Cloudflare runs-on: ubuntu-latest diff --git a/.github/workflows/generate-changelog.yml b/.github/workflows/generate-changelog.yml index 3e09f040b09..17aeda11096 100644 --- a/.github/workflows/generate-changelog.yml +++ b/.github/workflows/generate-changelog.yml @@ -9,6 +9,7 @@ on: jobs: generate-changelog: name: Generate Changelog + if: github.repository == 'superset-sh/superset' runs-on: ubuntu-latest permissions: contents: write diff --git a/.github/workflows/release-desktop-canary.yml b/.github/workflows/release-desktop-canary.yml index 89d7ef1a5e0..061eb3a0ab1 100644 --- a/.github/workflows/release-desktop-canary.yml +++ b/.github/workflows/release-desktop-canary.yml @@ -18,6 +18,7 @@ permissions: jobs: check-changes: name: Check for changes + if: github.repository == 'superset-sh/superset' runs-on: ubuntu-latest outputs: should_build: ${{ steps.check.outputs.should_build }} @@ -69,7 +70,7 @@ jobs: build: needs: check-changes - if: needs.check-changes.outputs.should_build == 'true' + if: github.repository == 'superset-sh/superset' && needs.check-changes.outputs.should_build == 'true' uses: ./.github/workflows/build-desktop.yml with: channel: canary @@ -82,7 +83,7 @@ jobs: release: name: Update Canary Release needs: [check-changes, build] - if: needs.check-changes.outputs.should_build == 'true' + if: github.repository == 'superset-sh/superset' && needs.check-changes.outputs.should_build == 'true' runs-on: ubuntu-latest steps: diff --git a/.github/workflows/setup-automations-schedule.yml b/.github/workflows/setup-automations-schedule.yml new file mode 100644 index 00000000000..e5c3f6ee27c --- /dev/null +++ b/.github/workflows/setup-automations-schedule.yml @@ -0,0 +1,36 @@ +name: Setup Automations Schedule + +on: + workflow_dispatch: + +jobs: + setup: + name: Ensure QStash schedule exists + runs-on: ubuntu-latest + environment: production + + steps: + - name: Checkout 
repository + uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1 + + - name: Setup Bun + id: setup-bun + uses: oven-sh/setup-bun@0c5077e51419868618aeaa5fe8019c62421857d6 # v2.2.0 + with: + bun-version-file: .bun-version + + - name: Cache dependencies + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: ~/.bun/install/cache + key: ${{ runner.os }}-bun-${{ steps.setup-bun.outputs.bun-revision }}-${{ hashFiles('bun.lock') }} + + - name: Install dependencies + run: bun install --frozen --ignore-scripts + + - name: Create schedule + env: + NEXT_PUBLIC_API_URL: ${{ secrets.NEXT_PUBLIC_API_URL }} + QSTASH_TOKEN: ${{ secrets.QSTASH_TOKEN }} + QSTASH_URL: ${{ secrets.QSTASH_URL }} + run: bun run apps/api/scripts/setup-automations-schedule.ts diff --git a/.github/workflows/triage-issue.yml b/.github/workflows/triage-issue.yml index 0457c711311..1d44af19c4f 100644 --- a/.github/workflows/triage-issue.yml +++ b/.github/workflows/triage-issue.yml @@ -17,6 +17,7 @@ concurrency: jobs: triage: name: Triage Issue + if: github.repository == 'superset-sh/superset' runs-on: ubuntu-latest timeout-minutes: 25 permissions: diff --git a/.github/workflows/update-docs.yml b/.github/workflows/update-docs.yml index cbcf109f42a..056b0a6b5fe 100644 --- a/.github/workflows/update-docs.yml +++ b/.github/workflows/update-docs.yml @@ -9,6 +9,7 @@ on: jobs: update-docs: name: Update Docs + if: github.repository == 'superset-sh/superset' runs-on: ubuntu-latest permissions: contents: write diff --git a/.gitignore b/.gitignore index f4011029bab..9f925b63328 100644 --- a/.gitignore +++ b/.gitignore @@ -54,6 +54,10 @@ next-env.d.ts # Superset (track scripts/config; ignore generated workspace artifacts) .superset/ports.json .superset/config.local.json +# Fork-local: TODO autonomous agent runtime artifacts (goal.md, state files) +.superset/todo/ +# Fork-local: Claude Code's local worktree scratch dirs +.claude/worktrees/ # tsbuildinfo *.tsbuildinfo @@ 
-79,14 +83,20 @@ apps/streams/data/ # Generated by setup.sh Caddyfile superset-dev-data/ +.upstream-builds/ # Codex workspace config (track only shared config/symlinks; ignore runtime state) .codex/* !.codex/config.toml !.codex/commands !.codex/prompts +.serena/ +test-conflict-repo/ .amp/* -# MCP config (contains per-user server URLs/tokens) -.mcp.json -.cursor/mcp.json +# Crush project context +.crush/ + +# Claude Code session lock (runtime artifact) +.claude/scheduled_tasks.lock +temp/ diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 00000000000..3651aa27461 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,37 @@ +{ + "mcpServers": { + "superset": { + "type": "http", + "url": "https://api.superset.sh/api/agent/mcp" + }, + "expo-mcp": { + "type": "http", + "url": "https://mcp.expo.dev/mcp", + "enabled": false + }, + "maestro": { + "command": "maestro", + "args": ["mcp"] + }, + "neon": { + "type": "http", + "url": "https://mcp.neon.tech/mcp" + }, + "linear": { + "type": "http", + "url": "https://mcp.linear.app/mcp" + }, + "sentry": { + "type": "http", + "url": "https://mcp.sentry.dev/mcp" + }, + "posthog": { + "type": "http", + "url": "https://mcp.posthog.com/mcp" + }, + "desktop-automation": { + "command": "bun", + "args": ["run", "packages/desktop-mcp/src/bin.ts"] + } + } +} diff --git a/AGENTS.md b/AGENTS.md index adce133d3ab..9589ece0598 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,9 +1,5 @@ # Superset Monorepo Guide -## Question Tool - -When you need to ask the user ANY question — including simple yes/no, confirmations, and clarifications — ALWAYS use the `ask_user` tool. Never ask questions in plain text. The Superset UI renders `ask_user` calls as an interactive overlay with clickable option buttons; plain-text questions will not be surfaced to the user in the same way. - Guidelines for agents and developers working in this repository. 
## Structure @@ -62,6 +58,205 @@ bun run clean # Clean root node_modules bun run clean:workspaces # Clean all workspace node_modules ``` +## MocA-Love/superset フォーク向け: Desktop アプリのビルドとリリース + +このフォーク固有のビルド手順とリリースフロー。本家 (superset-sh/superset) とは配布先やチャネルが異なる。 + +### ローカル開発 + +`apps/desktop` の dev 起動: + +```bash +cd apps/desktop +SUPERSET_WORKSPACE_NAME=superset SKIP_ENV_VALIDATION=1 DESKTOP_VITE_PORT=5222 bun run dev +``` + +- **`SUPERSET_WORKSPACE_NAME=superset` は必須**。未指定だと dev 環境のワークスペースデータが意図せず消える。 +- dev 起動時に `bun run predev` で `scripts/patch-dev-protocol.ts` が走り、プロトコルハンドラをパッチ。 + +### ローカルでの配布ビルド確認 + +```bash +cd apps/desktop + +# 1. コンパイル (electron-vite build) +bun run compile:app + +# 2. ネイティブモジュール複製 + ランタイム検証 +bun run copy:native-modules +bun run validate:native-runtime + +# 3. electron-builder でパッケージ (配布物の dmg / zip を生成、未 publish) +bun run build +# または package のみ (publish 条件を無視) +bun run package +``` + +`bun run build` は `--publish never` で実行され、`apps/desktop/release/` に成果物 (dmg / zip / blockmap / latest-mac.yml) が出る。 + +### かつて存在した dmg 破損バグ(v1.5.5-fork.9〜11、v1.5.5-fork.12 で解決済み) + +v1.5.5 系で app サイズが 1.6GB → 1.9GB に成長した結果、`bun run build` が生成する dmg から Electron Framework 本体 (167MB) が欠落する問題が発生していた。 + +**真因:** electron-builder が内部で使う `dmgbuild 1.2.0` (`~/Library/Caches/electron-builder/dmg-builder@1.2.0/.../dmgbuild/core.py:448-474`) はスパースイメージのサイズを `app ファイル合計 + 128MB` で算出するが、**HFS+ のカタログ/ジャーナル overhead (≈150MB+) を考慮しないため、app が 1.8GB を超えると `ditto` が "No space left on device" で停止する**。途中でコピーされる locale.pak 群の後に来る Electron Framework binary が丸ごと欠けた状態で dmg が完成する (zip は別経路で無傷のまま)。 + +**対処:** `apps/desktop/electron-builder.ts` の `dmg.size` で明示的に大きめの値を指定して dmgbuild の自動計算をバイパスする。 + +```ts +dmg: { + size: "4g", // app 成長を先読みして余裕を取る +}, +``` + +electron-builder schema 公式ドキュメントにも "Set this explicitly for large apps or apps with sparse files to avoid 'No space left on device' errors." 
と明記されている。size は `hdiutil -size` と同じ書式 ("150m"/"4g" 等)。 + +**app 本体が今後 3.5GB を超え始めたら `size` を上げ直す**。 + +**dmg 検証手順(念のため):** + +```bash +hdiutil attach apps/desktop/release/Superset-*-arm64.dmg -nobrowse -readonly +# 4点セットが揃っているか確認: +# - Superset.app/Contents/Frameworks/Electron Framework.framework/Versions/A/Electron Framework (167MB+) +# - Applications -> /Applications シンボリックリンク +# - .DS_Store (アイコン配置・ウィンドウサイズ) +# - .background.tiff (背景画像) +ls -la "/Volumes/Superset 1.5.5-arm64/" +ls -la "/Volumes/Superset 1.5.5-arm64/Superset.app/Contents/Frameworks/Electron Framework.framework/Versions/A/Electron Framework" +hdiutil detach "/Volumes/Superset 1.5.5-arm64" +``` + +**❌ 過去に試した回避策 (dmg 単独再実行 / hdiutil 手動生成) はどれも不完全。** `dmg.size` 指定が唯一のクリーンな正解。 + +### リリース (フォーク配布) + +このフォークには `release-desktop.yml` のような**自動リリースワークフローは無い**。タグ命名も upstream の `desktop-v*.*.*` ではなく **`v-fork.`** (例: `v1.5.5-fork.9`) を使う。リリースはローカルでビルドし `gh release create` で GitHub Release を作る**手動運用**。 + +`electron-builder.ts` の `publish` 設定は `superset-sh/superset` を向いたままなので、`bun run release` (electron-builder --publish always) は**使わない**。代わりに `bun run build` (`--publish never`) で成果物だけ作り、`gh` で MocA-Love/superset に上げる。 + +#### 手順 + +```bash +# 1. main を最新化 +git checkout main && git pull + +# 2. 前回の fork タグを確認して次の番号を決める +gh release list --repo MocA-Love/superset --limit 3 +git tag -l 'v*-fork.*' | sort -V | tail -5 + +# 3. 必要なら apps/desktop/package.json の version を更新 +# package.json version は upstream トラッキング、fork 番号は別管理。 +# upstream の major/minor が上がった時だけ package.json version を更新し、 +# fork.N はその version の中でインクリメント。 +# 例: package.json 1.5.5 のまま → タグは v1.5.5-fork.8, v1.5.5-fork.9 ... と進む + +# 4. node_modules を完全クリーン (後述のチェックリスト必須) +rm -rf node_modules apps/*/node_modules packages/*/node_modules +bun install + +# 5. 
ビルド +cd apps/desktop +# ⚠️ ビルド時の環境変数は必須。compile:app 時にバンドルへ焼き込まれる: +# - SUPERSET_WORKSPACE_NAME=superset … 未指定だとバンドルに "default" が焼き込まれ、 +# ~/.superset-default/ の空ワークスペースを見るようになる +# (ユーザーの ~/.superset/ 過去データに辿り着けず、強制ログイン画面化) +# - SENTRY_DSN_DESKTOP … .env にあるが compile:app は dotenv を自動ロードしない +# ので明示 export しないと Sentry が初期化されない +export SUPERSET_WORKSPACE_NAME=superset +export $(grep -v '^#' ../../.env | grep -E '^[A-Z_]+=' | tr '\n' ' ') +bun run compile:app +bun run copy:native-modules +bun run validate:native-runtime +# ⚠️ superset-browser-mcp の単一バイナリを必ず先に生成する。 +# Bun は npm/yarn と違って pre/post フックを自動実行しないため、 +# `bun run build` を直接叩くと package.json の `prebuild` +# (= build:browser-mcp) がスキップされ、packages/superset-browser-mcp/dist/ +# が空のまま electron-builder が走り、extraResources の +# `superset-browser-mcp` が黙って脱落する (= アプリ内に +# Resources/resources/superset-browser-mcp/ が作られず、 +# Connect モーダルで "Browser MCP binary is not available in this build" +# トーストが出る)。v1.5.5-fork.12 までこの漏れで出荷していた。 +bun run build:browser-mcp +SUPERSET_WORKSPACE_NAME=superset bun run build +# 成果物は apps/desktop/release/ に出力 (dmg, zip, blockmap, latest-mac.yml 等) +# `bun run build` は CSC_IDENTITY_AUTO_DISCOVERY=false 付きで adhoc sign になる。 +# keychain に Apple Development 証明書があっても拾わせない (配布用 Developer ID ではないので)。 + +# 6. リリースノート作成 (後述フォーマット) +# ファイルに保存しておくと gh release create の --notes-file で渡せる + +# 7. タグを切って push +cd ../.. +git tag v1.5.5-fork.9 +git push origin v1.5.5-fork.9 + +# 8. 
GitHub Release を作成し成果物をアップロード +gh release create v1.5.5-fork.9 \ + --repo MocA-Love/superset \ + --title "v1.5.5-fork.9" \ + --notes-file /tmp/release-notes-fork9.md \ + apps/desktop/release/Superset-*.dmg \ + apps/desktop/release/Superset-*-mac.zip \ + apps/desktop/release/Superset-*-mac.zip.blockmap \ + apps/desktop/release/latest-mac.yml +``` + +**注:** Windows / Linux ビルドは手元が macOS の場合は作成されない。必要なら各 OS で実行するか、GitHub Actions を別途仕立てる。upstream から取り込んだ `release-desktop.yml` は発火条件 (`desktop-v*.*.*`) がフォーク運用と合わないため、fork タグでは動かない。 + +#### リリースノート フォーマット + +`v1.5.5-fork.6` / `fork.7` で使った形式に揃える。トップレベルは原則: + +```markdown +## Highlights +### <主要機能 1 のタイトル> (#PR番号) +- 一言で何が変わったか +- 影響範囲 / 設定方法 / opt-in 要否など補足 +- 関連 PR や regression 対応があれば括弧で + +### <主要機能 2 のタイトル> +- ... + +## Upstream 取り込み ← upstream の PR を取り込んだ場合のみ +- 簡潔な日本語サマリ (upstream PR #xxxx) (our PR #YYY) +- mastracode / v2 editor 等の大物は 1 行追加説明可 + +## Bug Fixes +- 修正内容 (#PR) +- regression や revert/re-submit の流れはまとめて 1 行に圧縮 + +## Internal ← 任意、外形に影響しない整理系 +- AGENTS.md 改訂 / lockfile 整理 / CI 調整など + +**Full Changelog**: https://github.com/MocA-Love/superset/compare/<前回タグ>...<今回タグ> +``` + +ルール: +- **日本語で書く**。PR 番号は本家(`#3xxx`)とフォーク(`#xxx`)を区別、upstream 取り込みは「(upstream #3517) (#313)」のように両方併記 +- Highlights は **機能単位で 3〜6 セクション**。細かい UX 改善はまとめて `## UX / UI 改善` の単一セクションに寄せる +- Bug Fixes は**ユーザー影響がある修正のみ**。内部リファクタや lint 対応は Internal に +- 最後の Full Changelog リンクは必ず付ける (差分を追いやすくするため) + +### ビルド前チェックリスト — 忘れずに + +dependency bump や upstream 取り込み後にリリースビルドを走らせる前、以下を必ず実施: + +```bash +# lockfile と node_modules の整合性を取り直す (重複残骸を除去) +rm -rf node_modules apps/*/node_modules packages/*/node_modules +bun install +``` + +**理由:** `bun install` を override 切替や複数回 install で繰り返すと、lockfile には 1 バージョンしか無いのに node_modules 内に旧バージョンの残骸が残ることがある。この状態でビルドすると配布版に重複パッケージが混入し、`@pierre/diffs` 等の Web Components が `customElements.define` で二重登録され、DiffViewer のセパレータ枠線が白くなる等の UI 崩壊を引き起こす (参考: PR #332 / #333)。 + +以下のタイミングでは毎回上記のフルクリーンを挟むこと: +- dependency bump を含む PR を 
main にマージした直後 +- `overrides` や `patchedDependencies` を変更した後 +- `desktop-v*` タグを切る直前 +- `release-desktop.yml` を手動トリガーする前 + +CI 側でもワークフロー実行前に `node_modules` を毎回ゼロから作っていれば問題ないが、ローカル確認時は特に注意。 + ## Code Quality **Biome runs at root level** (not per-package) for speed: @@ -76,7 +271,8 @@ bun run clean:workspaces # Clean all workspace node_modules 3. **Shared command source** - keep command definitions in `.agents/commands/` only. `.claude/commands` and `.cursor/commands` should be symlinks to `../.agents/commands`. (`packages/chat` discovers slash commands from `.claude/commands`.) 4. **Workspace MCP config** - keep shared MCP servers in `.mcp.json`; `.cursor/mcp.json` should link to `../.mcp.json`. Codex uses `.codex/config.toml` (run with `CODEX_HOME=.codex codex ...`). OpenCode uses `opencode.json` and should mirror the same MCP set using OpenCode's `remote`/`local` schema. 5. **Mastra dependencies** - use the published upstream `mastracode` and `@mastra/*` packages. Do not add fork tarball overrides or custom patch steps unless explicitly requested. -6. **Plan & doc placement** - implementation plans go in `plans/` (cross-cutting) or `apps//plans/` (app-scoped); shipped plans move to `plans/done/`. Architecture/reference docs go in `/docs/`. Never drop `*_PLAN.md` at an app root or inside `src/`. +6. **Package age security policy** - global `npm`, `bun`, `pnpm`, and `uv` configs enforce a 7-day minimum release age, and `npm` also has `ignore-scripts=true`. If package install/update/add commands fail because a version is too new or a lifecycle script is blocked, do not keep retrying, disable the policy, or suggest bypass flags. Choose an older version that satisfies the policy, or stop and surface the blocked dependency clearly. +7. **Plan & doc placement** - implementation plans go in `plans/` (cross-cutting) or `apps//plans/` (app-scoped); shipped plans move to `plans/done/`. Architecture/reference docs go in `/docs/`. 
Never drop `*_PLAN.md` at an app root or inside `src/`. --- diff --git a/README.md b/README.md index e1d935232ec..648235d9497 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,117 @@ Works with any CLI agent. Built for local worktree-based development. +## Fork 固有の変更点 + +このリポジトリは [superset-sh/superset](https://github.com/superset-sh/superset) のフォークです。以下の独自変更が含まれています。 + +| 変更 | 概要 | PR | 追加日 | +|:-----|:-----|:--:|:------:| +| **Excel/スプレッドシート ビューア** | .xlsx/.xls/.ods ファイルを書式付きで表示。罫線・結合セル・テーマカラー・リッチテキスト対応。複数シートタブ切り替え、コンテナ幅への自動フィット | [#1](https://github.com/MocA-Love/superset/pull/1) | 2026-03-27 | +| **Excel diff ビューア** | スプレッドシートのサイドバイサイド差分表示。セル単位の変更ハイライト、Prev/Next ナビゲーション、左右同期スクロール | [#1](https://github.com/MocA-Love/superset/pull/1) | 2026-03-27 | +| **フォーク版アップデート通知** | 本家 electron-updater を無効化し、GitHub API でフォークリリースをチェックする方式に変更。新バージョン検出時にトースト通知を表示し「Open releases」からダウンロードページへ遷移。4時間ごと+起動時に自動チェック | [#3](https://github.com/MocA-Love/superset/pull/3) [#17](https://github.com/MocA-Love/superset/pull/17) | 2026-03-29 | +| **ブラウザ webview リロード防止** | タブ/ワークスペース切り替え時に Electron の webview がリロードされる問題を修正。webview を含むタブを keep-alive し、ワークスペースページをルーター上位で保持。WorkspaceIdContext による正しいコンテキスト分離、ホットキーの active-only 制御も実装 | [#2](https://github.com/MocA-Love/superset/pull/2) | 2026-03-28 | +| **マウス戻る/進むボタン対応** | ブラウザ webview 内でマウスの戻る/進むボタンが動作するように対応。macOS は guest ページへのスクリプト注入、Windows/Linux は app-command イベントで処理 | [#2](https://github.com/MocA-Love/superset/pull/2) | 2026-03-28 | +| **AI コミットメッセージ生成** | コミットメッセージ入力欄のスパークルボタンで AI が conventional commit メッセージを日本語で自動生成。階層的要約方式(gptcommit 式)により大量差分でも高精度。staged/unstaged/untracked 全対応、lock ファイル・バイナリ自動スキップ | [#4](https://github.com/MocA-Love/superset/pull/4) | 2026-03-28 | +| **ポートリストのリサイズ・フィルタ** | サイドバーの Ports セクションの高さをドラッグでリサイズ可能に(80–600px、永続化)。フィルタトグルで ports.json に定義されたポートのみ表示し、自動検出ポートを非表示にできる | [#6](https://github.com/MocA-Love/superset/pull/6) | 2026-03-28 | +| **大規模ファイル diff 高速化** | 2000行超のファイルで CodeMirror 6 ベースの仮想化 diff 
ビューアに自動切替。ビューポート分のDOMのみ描画し、15000行でもスムーズ表示。既存テーマ・シンタックスハイライト再利用、未変更領域の自動折りたたみ | [#5](https://github.com/MocA-Love/superset/pull/5) | 2026-03-28 | +| **ports.json ポートの常時表示** | ports.json に定義されたポートをプロセス検出の有無にかかわらず常にサイドバーに表示。Docker 等で検知できないポートもラベル付きで一覧に出る。検出済みポートは従来通りアクティブ表示、未検出は グレー表示で区別 | [#7](https://github.com/MocA-Love/superset/pull/7) | 2026-03-28 | +| **Ports ワークスペース名の改善** | Ports セクションのワークスペース名をワークツリーのディレクトリ名ベースに変更。同名ワークスペースが複数ある場合でもどのワークツリーか一目で区別可能 | [#8](https://github.com/MocA-Love/superset/pull/8) | 2026-03-28 | +| **ブラウザタブ機能強化** | ズーム倍率表示と [-]/[+] ボタン(Cmd+/- と同期)、target="_blank" リンクや Cmd+click を新しいブラウザタブで開く機能、URL コピーボタンを追加。タブが非表示中でもリンクイベントを正しく処理するグローバルハンドラ実装 | [#10](https://github.com/MocA-Love/superset/pull/10) | 2026-03-29 | +| **タブのポップアウト** | ペインツールバーの Pop out ボタンでタブを独立ウィンドウとして分離。閉じるとメインウィンドウに自動返却。ターミナルセッション維持、preload 同期注入方式で Zustand persist との競合を排除 | [#11](https://github.com/MocA-Love/superset/pull/11) | 2026-03-29 | +| **タブカラー設定** | タブを右クリック → Set Color で13色から背景色を設定可能。ワークスペースセクションと同じカラーパレットを再利用。アクティブ/非アクティブで濃淡が変化し、設定は自動永続化 | [#12](https://github.com/MocA-Love/superset/pull/12) | 2026-03-29 | +| **クラッシュリカバリー強化** | macOS でアプリが白画面/フリーズする問題を修正。GPU クラッシュ時に最大化/フルスクリーンでもコンポジター再構築を実行、レンダラークラッシュ時の自動リロード/再起動、clipboard 操作のエラーハンドリング追加 | [#13](https://github.com/MocA-Love/superset/pull/13) | 2026-03-29 | +| **Excel 描画オブジェクト・斜線表示** | Excel ファイルの描画オブジェクト(線・矩形)とセル斜線を表示。xlsx ZIP から drawing XML を直接パースし、CSS transform 方式の SVG オーバーレイで正確に配置 | [#16](https://github.com/MocA-Love/superset/pull/16) | 2026-03-29 | +| **Chrome 拡張機能インストール** | Chrome Web Store の URL または拡張 ID からブラウザ拡張機能をインストール。CRX ダウンロード・展開、互換性チェック(Electron 非対応 API 検出)、設定画面での管理(有効/無効/削除)。BrowserPane ツールバーに拡張アイコンを表示し、クリックでポップアップウィンドウを表示。GPL ライブラリ不使用、Electron 標準 API のみで自前実装 | [#20](https://github.com/MocA-Love/superset/pull/20) | 2026-03-29 | +| **Excel diff インラインハイライト** | Excel 差分表示で変更セル内のテキスト差分を文字レベルでインライン表示。追加部分は緑、削除部分は赤+取り消し線。セルからはみ出る場合はホバーでツールチップにフル差分を表示 | [#19](https://github.com/MocA-Love/superset/pull/19) | 
2026-03-29 | +| **Files タブのツールチップ** | ファイルツリーのファイル/フォルダ名にホバーで相対パスをツールチップ表示。ツールバーのトグルボタンで ON/OFF 切り替え、設定は永続化 | [#22](https://github.com/MocA-Love/superset/pull/22) | 2026-03-29 | +| **Inspect Element(右クリック検証)** | ブラウザペインの右クリックメニューに「Inspect Element」を追加。クリック位置の要素を直接 DevTools でインスペクト可能 | [#23](https://github.com/MocA-Love/superset/pull/23) | 2026-03-30 | +| **Branch ワークスペースの PR 表示対応** | worktree を切らない「branch」タイプのワークスペースでも Review タブに PR 情報・チェック結果・レビューコメントを表示。`getGitHubStatus` / `getGitHubPRComments` が worktree レコード必須だった制限を、`mainRepoPath` へのフォールバックで解消 | [#24](https://github.com/MocA-Love/superset/pull/24) | 2026-03-30 | +| **シェル履歴サジェスト** | ターミナル入力時に ~/.zsh_history からコマンド候補をドロップダウン表示。↑↓で選択、→で確定、Escで破棄。選択中コマンドのフルプレビュー付き(補完部分を緑色で強調)。8件超はスクロール、末尾到達で追加読み込み。設定画面から ON/OFF 切り替え可能 | [#24](https://github.com/MocA-Love/superset/pull/24) | 2026-03-30 | +| **Sentry エラー監視統合** | 自前の Sentry プロジェクトと連携可能。`.env` に `SENTRY_DSN_DESKTOP` を設定するだけで本番ビルドのクラッシュ・エラーを自動収集 | [#26](https://github.com/MocA-Love/superset/pull/26) | 2026-03-30 | +| **デスクトップ安定性修正** | シェル履歴サジェストが表示されないバグ(useEffect 依存配列の問題)、アプリ終了時の napi_fatal_error クラッシュ(SQLite 未クローズ)、webview パーキング後の getURL() エラー、サイドバーリサイズが webview 上で効かない問題を修正 | [#26](https://github.com/MocA-Love/superset/pull/26) | 2026-03-30 | +| **Review パネル強化** | GitHub Actions チェックを展開してジョブ内ステップの進捗を表示。レビューコメントを展開して Markdown レンダリング全文表示(GitHub Alerts 対応)。コメントのファイルパス+行番号クリックでエディタの該当行にジャンプ | [#27](https://github.com/MocA-Love/superset/pull/27) | 2026-03-30 | +| **サジェストバグ修正** | ドロップダウンのはみ出し防止(上側表示切替)、alternate screen(Claude Code等)中のサジェスト完全抑制(4層防御)、Agent操作中の非表示化、日本語文字化け修正(zsh metafied エンコーディング対応) | [#31](https://github.com/MocA-Love/superset/pull/31) | 2026-03-30 | +| **サジェスト履歴削除** | サジェスト一覧の各候補にバツボタンを追加し、クリックで ~/.zsh_history から直接削除。atomic write でファイル破損防止、metafied エンコーディング対応 | [#34](https://github.com/MocA-Love/superset/pull/34) | 2026-03-30 | +| **ブラウザアドレスバー選択修正** | アドレスバーでURLをマウスドラッグで範囲選択しようとするとペインが移動する問題を修正。input の mousedown イベント伝播を阻止 | 
[#34](https://github.com/MocA-Love/superset/pull/34) | 2026-03-30 | +| **git blame インライン表示** | ファイルビューアで行番号横に blame 情報をインライン表示。行ホバーで作者・コミットメッセージ・日時のポップアップを表示。表示タイミングを修正し、ファイル切り替え後も正しく動作 | [#38](https://github.com/MocA-Love/superset/pull/38) | 2026-03-31 | +| **マージコンフリクト解消 UI** | diff ビューア内でコンフリクトマーカーをインラインで検出し、VSCode スタイルの「Accept Current / Accept Incoming / Accept Both」ボタンを表示。ワンクリックでコンフリクトを解消可能 | [#38](https://github.com/MocA-Love/superset/pull/38) | 2026-03-31 | +| **GitGraph 詳細パネル修正** | GitGraph の詳細パネルがペイン外にはみ出る問題を修正。パネルの位置計算を改善し、画面端でも正しく収まるよう対応 | [#38](https://github.com/MocA-Love/superset/pull/38) | 2026-03-31 | +| **ConflictViewer 表示・スタイル修正** | ConflictViewer の表示条件とスタイルを修正 | [#38](https://github.com/MocA-Love/superset/pull/38) | 2026-03-31 | +| **ワークスペース切替・レビュー系 UX 強化** | Branch picker の検索・作成導線とブランチ情報表示を改善、blame tooltip に GitHub avatar を追加。ターミナル履歴サジェストの Enter/補完・プレビュー挙動を改善 | [#40](https://github.com/MocA-Love/superset/pull/40) | 2026-03-31 | +| **Review パネル URL ナビゲーション改善** | Review 内のコメント・PR タイトル・Markdown 内リンクを Superset のブラウザタブで新規開くよう統一。既存ブラウザタブの URL 差し替え問題を回避 | [#35](https://github.com/MocA-Love/superset/pull/35) | 2026-03-30 | +| **Problems / Database Explorer / Search 強化** | エディターの問題診断 `Problems` タブを追加し、Workspace 全体の警告・エラーを絞り込み・再取得・該当行ジャンプ可能に。右サイドバーへ Database Explorer と Search(glob/正規表現/置換)を追加 | [#44](https://github.com/MocA-Love/superset/pull/44) | 2026-04-01 | +| **言語診断の多言語対応拡張** | Diagnostics の LSP 基盤を外部 Language Server 化し、YAML / HTML / CSS / Python / Go / Rust / Dockerfile / GraphQL に対応。provider の ON/OFF 切替と runtime materialization を整備 | [#48](https://github.com/MocA-Love/superset/pull/48) | 2026-04-02 | +| **Docker サイドバーと検索・DB設定の大規模追加** | 右サイドバーに Docker ビューを追加してコンテナ/イメージ/ボリュームを管理。Search を木構造・仮想スクロール化し大量件数を高速化。workspace DB 設定の読み書き UI を追加 | [#51](https://github.com/MocA-Love/superset/pull/51) | 2026-04-02 | +| **ブラウザブックマーク管理** | ブックマークのフォルダ作成・ネスト・並び替え、Netscape HTML 形式のインポート/エクスポート、フォルダアイコン・カラー設定 | [#55](https://github.com/MocA-Love/superset/pull/55) | 
2026-04-03 | +| **.env / CSV / TSV シンタックスハイライト** | `.env` / `.env.*` ファイルのシンタックスハイライト対応。CSV / TSV は列ごとにテーマカラーをローテーションして表示 | [#64](https://github.com/MocA-Love/superset/pull/64) | 2026-04-04 | +| **HTML ファイルプレビュー** | HTML ファイルをサンドボックス化された webview でレンダリング表示。ズーム操作(+/-/リセット)、リフレッシュボタン、ファイル変更時の自動リロード対応 | [#69](https://github.com/MocA-Love/superset/pull/69) [#77](https://github.com/MocA-Love/superset/pull/77) [#144](https://github.com/MocA-Love/superset/pull/144) | 2026-04-04 | +| **PDF ファイルプレビュー** | Chromium 内蔵の PDF ビューアを webview 経由で利用。ズーム・ページ送り・テキスト検索がそのまま使用可能 | [#70](https://github.com/MocA-Love/superset/pull/70) | 2026-04-04 | +| **GitHub Actions ログビューア** | Review タブの Checks から「View logs」でネイティブログ表示。ジョブ一覧+ステップ開閉式ログ、ANSI カラー対応、ログ検索、ログコピー(ANSI/タイムスタンプ除去)。Re-run ボタン、リアルタイムポーリング更新 | [#72](https://github.com/MocA-Love/superset/pull/72) [#73](https://github.com/MocA-Love/superset/pull/73) [#122](https://github.com/MocA-Love/superset/pull/122) | 2026-04-04 | +| **Workflow Dispatch UI** | workflow_dispatch の inputs(choice/boolean/string/number)を YAML からパースして UI 表示。ワークフロー実行後はリアルタイムでログに自動遷移 | [#75](https://github.com/MocA-Love/superset/pull/75) | 2026-04-04 | +| **フォークリポジトリ PR 対応** | fork / tracking remote / upstream が混在するリポジトリで PR の向き先候補を自動解決。base repository 選択 UI を追加 | [#71](https://github.com/MocA-Love/superset/pull/71) [#101](https://github.com/MocA-Love/superset/pull/101) | 2026-04-04 | +| **GitHub API 最適化** | 複数ポーリング経路を GitHubSyncService に統合。指数バックオフ付きレートリミッター、アクティブワークスペースのみポーリング(API calls/min: ~75 → ~15) | [#78](https://github.com/MocA-Love/superset/pull/78) [#80](https://github.com/MocA-Love/superset/pull/80) | 2026-04-05 | +| **Docker タブ UX 改善** | コンテナに Rebuild/Delete ボタンとステータス連動コントロールを追加。Database サイドバーをワークスペースごとにスコープ化。Dockerfile 単体プロジェクトでも Docker タブを表示 | [#69](https://github.com/MocA-Love/superset/pull/69) [#76](https://github.com/MocA-Love/superset/pull/76) [#79](https://github.com/MocA-Love/superset/pull/79) | 2026-04-04 | +| **Markdown / シンタックスハイライト強化** | CodeMirror 
で Lezer の全タグをカバーし VS Code 並のハイライト品質を実現。Markdown の fenced code blocks 内で 19 言語のネスト言語ハイライト対応 | [#90](https://github.com/MocA-Love/superset/pull/90) | 2026-04-06 | +| **VS Code Extension Host Shim** | VS Code 拡張機能ホストシム層を追加(約30 API をシム実装)。Claude Code 拡張の完全なチャット UI 表示・MCP 接続、Codex/ChatGPT 拡張のチャット UI 表示に対応。Webview 配信、Commands、Workspace API 等を実装 | [#91](https://github.com/MocA-Love/superset/pull/91) | 2026-04-06 | +| **インライン自動補完(Inception)** | FIM(Fill-in-the-Middle)を優先し Next Edit をフォールバックに使う補完フロー。Inception usage のローカル集計と設定画面表示。過剰発火の抑制 | [#92](https://github.com/MocA-Love/superset/pull/92) [#132](https://github.com/MocA-Love/superset/pull/132) | 2026-04-06 | +| **vscode.diff コマンド対応** | Codex 拡張の「Review changes」ボタンから Superset の diff viewer を直接開けるよう `vscode.diff` コマンドをシム実装 | [#104](https://github.com/MocA-Love/superset/pull/104) | 2026-04-08 | +| **メモタブ(Memo)** | `.superset/memos/` に保存されるメモを作成可能。Markdown エディタで画像を貼り付けると assets に保存し相対パスを自動挿入。自動保存対応 | [#129](https://github.com/MocA-Love/superset/pull/129) | 2026-04-09 | +| **右サイドバー初期幅設定** | 右サイドバーから開く Files や Changes diff ビューの初期幅を設定で変更可能に | [#130](https://github.com/MocA-Love/superset/pull/130) | 2026-04-09 | +| **リファレンスグラフ** | LSP 基盤を拡張し、シンボルの参照関係・呼び出し階層をインタラクティブなグラフで可視化。@xyflow/react + ELK.js による自動レイアウト、Shiki シンタックスハイライト統合、PNG エクスポート対応。エディタ右クリックから「Show Reference Graph」で起動 | [#147](https://github.com/MocA-Love/superset/pull/147) [#148](https://github.com/MocA-Love/superset/pull/148) | 2026-04-11 | +| **Git 操作ダイアログ統一** | Git 関連エラーとユーザー判断を統一 `GitOperationDialog` に集約。25 種類のエラー自動分類、merge-pr・bulk-stage-all・workflow-dispatch 等の確認ダイアログ、silent auto-repair 通知 | [#153](https://github.com/MocA-Love/superset/pull/153) | 2026-04-12 | +| **UX 改善バッチ** | Clone 進捗のストリーミング表示(プログレスバー+キャンセル)、Diff Viewer 内検索、タブ切替時の editor state 保持、Git サイドバーの複数選択 stage/unstage(Shift/Cmd+Click)、内蔵ブラウザの Cmd+F 検索 | [#154](https://github.com/MocA-Love/superset/pull/154) | 2026-04-13 | +| **Hover / Go-to-Definition** | エディタで変数・関数にホバーすると Markdown 
レンダリング対応の型情報・ドキュメントを表示。Shiki ベースのコードブロックハイライト付き。F12 / Cmd+Click / 右クリック「Go to Definition」で定義元にジャンプ。Cmd 押下時にトークンへ下線表示。TypeScript + 外部 LSP 対応 | [#156](https://github.com/MocA-Love/superset/pull/156) [#166](https://github.com/MocA-Love/superset/pull/166) | 2026-04-14 | +| **タブ分割ボタン** | タブツールバーに縦分割・横分割ボタンを追加。ワンクリックでペインを分割可能 | [#155](https://github.com/MocA-Love/superset/pull/155) | 2026-04-14 | +| **安定性・パフォーマンス改善** | LSP language services の安定性修正、拡張機能ホストのメモリリーク修正、ターミナル再表示遅延改善、認証切れ時の無限ループ防止、git status タイムアウト追加、ブラウザリダイレクトループ修正、ポップアウトウィンドウの認証修正、エラーの正規化と Sentry フィルタリング | [#88](https://github.com/MocA-Love/superset/pull/88) [#123](https://github.com/MocA-Love/superset/pull/123) [#121](https://github.com/MocA-Love/superset/pull/121) [#67](https://github.com/MocA-Love/superset/pull/67) [#66](https://github.com/MocA-Love/superset/pull/66) [#158](https://github.com/MocA-Love/superset/pull/158) [#146](https://github.com/MocA-Love/superset/pull/146) [#98](https://github.com/MocA-Love/superset/pull/98) | 2026-04-04〜14 | +| **内部ブラウザの File System Access API 拒否回避** | 内部ブラウザで react-dropzone 系サイトを開くと `FileSystemFileHandle.getFile()` が NotAllowedError で落ちる問題を修正。`persist:superset` セッションに preload を追加し `DataTransferItem.getAsFileSystemHandle()` を null 返却に差し替えて legacy D&D パスへフォールバック | [#207](https://github.com/MocA-Love/superset/pull/207) | 2026-04-16 | +| **PR コメント返信** | Review タブのコメント右上に Reply ボタンを追加。ダイアログから直接返信を投稿できる。レビュースレッドへの返信と通常 PR コメントの両方に対応 | [#206](https://github.com/MocA-Love/superset/pull/206) | 2026-04-16 | +| **TODO Agent スケジュール実行** | 毎日デプロイ / 毎時 lint のような定型 TODO を UI ビルダー (毎時/毎日/毎週/毎月/cron) で登録可能。アプリ起動中に時刻が来ると TODO セッションが自動作成され発火トーストを表示。前回未完了時は skip / queue 選択可 | [#211](https://github.com/MocA-Love/superset/pull/211) | 2026-04-16 | +| **TODO 詳細の添付画像 chip 化+プレビュー** | TODO 作成時に「やってほしいこと」「ゴール」へ貼り付けた画像を、タスク詳細画面でクリップマーク + ファイル名の chip として表示。クリックでネスト Dialog の画像プレビューを開ける(AgentManager は閉じない)。`todo-agent/attachments/` 配下のみを許可するパス検証付き `readAttachment` tRPC を追加 | 
[#229](https://github.com/MocA-Love/superset/pull/229) | 2026-04-16 | +| **AgentManager 見切れ救済** | AgentManager 左サイドバーのワークスペース見出し・セッションタイトル、右 ChangesSidebar のブランチ/ファイルパス/コミット subject/選択ヘッダーが狭幅で見切れていた問題を修正。`truncate` + ホバー時 `Tooltip` で全文表示 | [#254](https://github.com/MocA-Love/superset/pull/254) | 2026-04-17 | +| **Excel diff / raw viewer の透過抑止** | Appearance の透過設定 (vibrancy) ON 時に Excel ビューア / Excel diff / 画像プレビュー / HTML プレビューの背景まで透けていた問題を修正。これらの読み取り専用サーフェスは `bg-background-solid` に差し替えてダイアログと同様に不透明で維持 | [#266](https://github.com/MocA-Love/superset/pull/266) | 2026-04-17 | + +## Fork のビルド方法 (macOS) + +### 前提条件 + +- [Bun](https://bun.sh/) v1.0+ +- Git 2.20+ +- Xcode Command Line Tools (`xcode-select --install`) + +### 手順 + +```bash +# 1. リポジトリをクローン +git clone https://github.com/MocA-Love/superset.git +cd superset + +# 2. 依存関係のインストール +bun install + +# 3. デスクトップアプリをビルド +cd apps/desktop +SUPERSET_WORKSPACE_NAME=superset bun run build + +# 4. ビルド成果物を開く +open release +``` + +`release` フォルダ内の `.dmg` ファイルを開き、Superset.app を Applications にドラッグしてインストールしてください。 + +> **⚠️ ビルド時の注意**: `bun dev` でアプリを起動中にビルドすると、開発用の環境変数(`SUPERSET_WORKSPACE_NAME=default` 等)がバイナリに焼き込まれ、本番データ(`~/.superset/`)が参照されなくなります。ビルド時は必ず `SUPERSET_WORKSPACE_NAME=superset` を明示的に指定してください。 + +> **📦 上書きインストールについて**: 公式版の `.dmg` をフォーク版で上書きしても、ワークスペース・ターミナル履歴・設定はすべて `~/.superset/` に保持されるため、データが消えることはありません。 + +### 開発モードで実行 + +```bash +bun install +bun run dev --filter=@superset/desktop +``` + +--- + ## Code 10x Faster With No Switching Cost Superset orchestrates CLI-based coding agents across isolated git worktrees, with built-in terminal, review, and open-in-editor workflows. diff --git a/a.html b/a.html new file mode 100644 index 00000000000..d462412092a --- /dev/null +++ b/a.html @@ -0,0 +1,183 @@ + + + + + 画像添付アイコン候補 + + + + +
+
+

画像添付チップ・アイコン候補

+

+ Superset(shadcn/tailwind ベース) のダークテーマに合わせた候補。 + 実アプリで使われている react-icons hi2 / lu 系統から選定。 +

+
+ +
+
+
+
HiMiniPaperClip
+
+ Heroicons · 「添付」の普遍的表現
(PlusMenuで既に使用中) +
+
+ + + screenshot-2026.png + × + +
+ +
+
+
HiMiniPhoto
+
+ Heroicons · 画像固有(山+太陽)。現在の実装 +
+
+ + + screenshot-2026.png + × + +
+ +
+
+
HiMiniCamera
+
Heroicons · スクショ感
+
+ + + screenshot-2026.png + × + +
+ +
+
+
LuPaperclip
+
Lucide · 線画の軽快さ
+
+ + + screenshot-2026.png + × + +
+ +
+
+
LuImage
+
+ Lucide · 枠+山+円。outlineでシャープ
(RepositoryPanelで使用中) +
+
+ + + screenshot-2026.png + × + +
+ +
+
+
LuFileImage
+
+ Lucide · ファイル+画像。「画像ファイル」を明示 +
+
+ + + screenshot-2026.png + × + +
+ +
+
+
LuImageUp
+
Lucide · 画像+上向き矢印(アップロード)
+
+ + + screenshot-2026.png + × + +
+ +
+
+
LuImages
+
Lucide · 画像スタック、複数枚感
+
+ + + screenshot-2026.png + × + +
+
+ +
+

推奨順:

+
    +
  1. HiMiniPaperClip — Chat PlusMenuで「添付」として既に使用中。一貫性◎
  2. LuPaperclip — 同じ意味で線画ベース、軽い印象
  3. HiMiniPhoto — 画像固有、他UIと系統が合う (現実装)
+
+
+ + diff --git a/apps/api/scripts/setup-automations-schedule.ts b/apps/api/scripts/setup-automations-schedule.ts new file mode 100644 index 00000000000..50d5c48bcc5 --- /dev/null +++ b/apps/api/scripts/setup-automations-schedule.ts @@ -0,0 +1,30 @@ +/** + * Idempotent: ensure a QStash schedule pings the automations evaluator every + * minute. Re-running is a no-op if a schedule already targets the same URL. + * + * Usage: + * NEXT_PUBLIC_API_URL=https://api.superset.sh \ + * QSTASH_TOKEN=... \ + * bun run apps/api/scripts/setup-automations-schedule.ts + */ +import { Client } from "@upstash/qstash"; + +const token = process.env.QSTASH_TOKEN; +const apiUrl = process.env.NEXT_PUBLIC_API_URL; + +if (!token) throw new Error("QSTASH_TOKEN is required"); +if (!apiUrl) throw new Error("NEXT_PUBLIC_API_URL is required"); + +const destination = `${apiUrl}/api/automations/evaluate`; +const cron = "* * * * *"; +const qstash = new Client({ token }); + +const existing = await qstash.schedules.list(); +const match = existing.find((s) => s.destination === destination); +if (match) { + console.log(`Schedule already exists: ${match.scheduleId} → ${destination}`); + process.exit(0); +} + +const scheduleId = await qstash.schedules.create({ destination, cron }); +console.log(`Created schedule ${scheduleId} → ${destination}`); diff --git a/apps/api/src/app/api/automations/evaluate/route.ts b/apps/api/src/app/api/automations/evaluate/route.ts index f0c8671c087..16127e8ebb6 100644 --- a/apps/api/src/app/api/automations/evaluate/route.ts +++ b/apps/api/src/app/api/automations/evaluate/route.ts @@ -8,10 +8,7 @@ import { env } from "@/env"; export const dynamic = "force-dynamic"; -const qstash = new Client({ - token: env.QSTASH_TOKEN, - baseUrl: env.QSTASH_URL, -}); +const qstash = new Client({ token: env.QSTASH_TOKEN }); const receiver = new Receiver({ currentSigningKey: env.QSTASH_CURRENT_SIGNING_KEY, nextSigningKey: env.QSTASH_NEXT_SIGNING_KEY, @@ -62,7 +59,7 @@ export async function 
POST(request: Request): Promise { automationId: automation.id, scheduledFor: scheduledFor.toISOString(), }, - deduplicationId: `${automation.id}_${scheduledFor.getTime()}`, + deduplicationId: `${automation.id}:${scheduledFor.toISOString()}`, retries: 2, failureCallback: `${env.NEXT_PUBLIC_API_URL}/api/automations/run-failed`, }; diff --git a/apps/api/src/app/api/trpc/[trpc]/route.ts b/apps/api/src/app/api/trpc/[trpc]/route.ts index 89838aab968..939747ac08a 100644 --- a/apps/api/src/app/api/trpc/[trpc]/route.ts +++ b/apps/api/src/app/api/trpc/[trpc]/route.ts @@ -2,6 +2,9 @@ import { appRouter } from "@superset/trpc"; import { fetchRequestHandler } from "@trpc/server/adapters/fetch"; import { createContext } from "@/trpc/context"; +// Some procedures (automation.runNow → dispatchAutomation) do a workspace +// create over the relay which can take 30-60s for large repos. The default +// Vercel function timeout is too tight. export const maxDuration = 60; const handler = (req: Request) => diff --git a/apps/api/src/env.ts b/apps/api/src/env.ts index 117ed25046a..c8d7a44afe6 100644 --- a/apps/api/src/env.ts +++ b/apps/api/src/env.ts @@ -27,7 +27,6 @@ export const env = createEnv({ SLACK_SIGNING_SECRET: z.string(), ANTHROPIC_API_KEY: z.string(), QSTASH_TOKEN: z.string().min(1), - QSTASH_URL: z.string().url(), QSTASH_CURRENT_SIGNING_KEY: z.string().min(1), QSTASH_NEXT_SIGNING_KEY: z.string().min(1), RESEND_API_KEY: z.string(), diff --git a/plans/v2-pr-link-command-design.md b/apps/desktop/V2_PR_LINK_COMMAND_DESIGN.md similarity index 100% rename from plans/v2-pr-link-command-design.md rename to apps/desktop/V2_PR_LINK_COMMAND_DESIGN.md diff --git a/apps/desktop/V2_WORKSPACE_MODAL_GAPS.md b/apps/desktop/V2_WORKSPACE_MODAL_GAPS.md index d0db24298c3..282aa33f170 100644 --- a/apps/desktop/V2_WORKSPACE_MODAL_GAPS.md +++ b/apps/desktop/V2_WORKSPACE_MODAL_GAPS.md @@ -1,18 +1,6 @@ # V2 Workspace Creation Modal — Gap Analysis vs V1 -> Generated 2026-04-11. Last updated 2026-04-12. 
Compares V2 (`DashboardNewWorkspaceModal`) against V1 (`NewWorkspaceModal`). - -## Status Summary - -| # | Gap | Status | -|---|-----|--------| -| 1 | Project Picker — Open/New project actions | Open | -| 2 | Branch Picker — Worktree awareness | Open | -| 3 | AI Branch Name Generation | Open | -| 4 | GitHub Issue Content Auto-Fetching | Open | -| 5 | Agent Launch Request Building | Open | -| 6 | Dedicated "Create from PR" Flow | Open | -| 7 | PR URL Parsing and Cross-Repo Validation | ✅ Resolved (PR #3356) — extended to issues | +> Generated 2026-04-11. Compares V2 (`DashboardNewWorkspaceModal`) against V1 (`NewWorkspaceModal`). ## File References @@ -99,17 +87,14 @@ --- -### 7. PR URL Parsing and Cross-Repo Validation — ✅ Resolved (PR #3356) +### 7. PR URL Parsing and Cross-Repo Validation **V1**: `PRLinkCommand` parses pasted GitHub PR URLs (`github.com/:owner/:repo/pull/:number`), detects cross-repository links, and shows an error ("PR URL must match {repo}") for mismatched repos. -**V2 (resolved)**: Server-side `normalizeGitHubQuery` in host-service handles URL parsing, `#123` / bare-number shorthand, and cross-repo validation. Response returns `{ repoMismatch: "owner/repo" }` and client shows "PR URL must match owner/repo." Same normalization also extended to `searchGitHubIssues`. Debounce-gap loading state (`isPendingDebounce`) added to prevent empty-state flash. +**V2**: `PRLinkCommand` uses host-service `searchPullRequests` endpoint only. No client-side URL parsing or cross-repo validation. 
-**Resolved by**: PR #3356 (merged 2026-04-11) -**Refs**: -- `packages/host-service/src/trpc/router/workspace-creation/normalize-github-query.ts` -- `…/PromptGroup/components/PRLinkCommand/PRLinkCommand.tsx` -- `…/PromptGroup/components/GitHubIssueLinkCommand/GitHubIssueLinkCommand.tsx` +**V1 ref**: `PRLinkCommand.tsx:37-53, 86-97` +**V2 ref**: `PRLinkCommand.tsx` (V2 version) --- @@ -119,7 +104,7 @@ --- -## Priority Assessment (remaining) +## Priority Assessment | # | Gap | Impact | Effort | |---|-----|--------|--------| @@ -129,4 +114,4 @@ | 6 | Dedicated "create from PR" flow | Medium — PR workspaces may not set up branches properly | Medium | | 2 | Branch picker worktree awareness | Medium — can't discover/open existing worktrees | High | | 1 | Project picker open/new actions | Low — can do this outside the modal | Low | -| ~~7~~ | ~~PR URL parsing / cross-repo validation~~ | ✅ Resolved by #3356 | — | +| 7 | PR URL parsing / cross-repo validation | Low — server search covers most cases | Low | diff --git a/apps/desktop/docs/EXTERNAL_FILES.md b/apps/desktop/docs/EXTERNAL_FILES.md index 5cc01ad97cb..471bb2970e2 100644 --- a/apps/desktop/docs/EXTERNAL_FILES.md +++ b/apps/desktop/docs/EXTERNAL_FILES.md @@ -45,13 +45,15 @@ its hook entries into these files while preserving user-defined entries: | `~/.codex/hooks.json` | Codex hook registration merge (`SessionStart`, `UserPromptSubmit`, `Stop`) | | `~/.factory/settings.json` | Factory Droid hook registration (`UserPromptSubmit`, `Notification`, `PostToolUse`, `Stop`) | -For Codex specifically, Superset now relies on native `~/.codex/hooks.json` -registration for durable prompt/tool lifecycle events, while the wrapper in -`~/.superset[-{workspace}]/bin/codex` still injects `notify` and keeps the -session-log watcher as a best-effort compatibility bridge for older Codex -releases. 
On startup, Superset rewrites only its own managed entries in -`~/.codex/hooks.json` to point at the current environment's `notify.sh`, while -preserving any user-defined Codex hooks. +For Codex specifically, Superset relies on native `~/.codex/hooks.json` +registration as the sole source of completion notifications. The wrapper in +`~/.superset[-{workspace}]/bin/codex` only enables `codex_hooks` (by passing +`--enable codex_hooks` to the real binary) and keeps the session-log watcher +as a best-effort bridge for per-prompt Start notifications and permission +requests inside Superset terminals. It no longer injects `--notify=[...]` to +avoid duplicate `/hook/complete` POSTs. On startup, Superset rewrites only its +own managed entries in `~/.codex/hooks.json` to point at the current +environment's `notify.sh`, while preserving any user-defined Codex hooks. ### `zsh/` and `bash/` - Shell Integration diff --git a/apps/desktop/docs/LANGUAGE_SERVICES.md b/apps/desktop/docs/LANGUAGE_SERVICES.md new file mode 100644 index 00000000000..99d8347102c --- /dev/null +++ b/apps/desktop/docs/LANGUAGE_SERVICES.md @@ -0,0 +1,128 @@ +# Desktop Language Services + +This document tracks the IDE-oriented diagnostics stack used by the desktop app. + +## Goals + +- Keep editor and sidebar UI stable while adding language-specific diagnostics. +- Match VS Code behavior as closely as practical for each language. +- Make it easy to add new providers behind the same manager/store/router flow. + +## Current Providers + +### TypeScript / JavaScript / TSX / JSX + +- Backend: `tsserver` +- Reason: VS Code uses `tsserver` for TypeScript and JavaScript language features, so this is the closest path to parity. +- Source: + - https://github.com/microsoft/TypeScript/wiki/Standalone-Server-%28tsserver%29 + +### JSON / JSONC + +- Backend: `vscode-json-languageservice` +- Reason: This is the JSON language service used in the VS Code ecosystem and supports schema-backed validation. 
+- Source: + - https://github.com/microsoft/vscode-json-languageservice + +### YAML + +- Backend: `yaml-language-server` +- Reason: This is the YAML language server used by the Red Hat YAML extension and supports schema-backed validation through SchemaStore. +- Source: + - https://github.com/redhat-developer/yaml-language-server + +### HTML + +- Backend: `vscode-html-language-server` from `vscode-langservers-extracted` +- Reason: The language service package itself does not expose diagnostics, so HTML now uses the bundled VS Code language server path. +- Source: + - https://www.npmjs.com/package/vscode-langservers-extracted + +### CSS / SCSS / LESS + +- Backend: `vscode-css-languageservice` +- Reason: This is the CSS language service used in the VS Code ecosystem. +- Source: + - https://github.com/microsoft/vscode-css-languageservice + +### TOML + +- Backend: `@taplo/lib` +- Reason: Taplo is the de facto TOML toolkit with a maintained JavaScript/WASM entrypoint suitable for desktop embedding. +- Source: + - https://taplo.tamasfe.dev/lib/javascript/lib.html + +### Dart / Flutter + +- Backend: Dart language server via `dart language-server` +- Reason: This matches the official Dart analysis server/LSP flow and works for both Dart and Flutter projects. +- Sources: + - https://dart.dev/tools/analysis-server + - https://raw.githubusercontent.com/dart-lang/sdk/main/pkg/analysis_server/tool/lsp_spec/README.md + +### Python + +- Backend: `pyright-langserver` +- Reason: Pyright is the TypeScript-based Python language server behind the Pyright ecosystem and is close to the VS Code extension path. +- Source: + - https://github.com/microsoft/pyright + +### Go + +- Backend: `gopls` +- Reason: `gopls` is the official Go language server maintained by the Go team. +- Source: + - https://go.dev/gopls/ + +### Rust + +- Backend: `rust-analyzer` +- Reason: `rust-analyzer` is the standard Rust language server used by most editors, including VS Code setups. 
+- Source: + - https://rust-analyzer.github.io/book/ + +### Dockerfile + +- Backend: `dockerfile-language-server-nodejs` +- Reason: This is the Dockerfile language server used by the VS Code Docker tooling ecosystem. +- Source: + - https://github.com/rcjsuen/dockerfile-language-server-nodejs + +### GraphQL + +- Backend: `graphql-language-service-cli` +- Reason: This provides the `graphql-lsp` server from the GraphiQL language-service stack. +- Source: + - https://github.com/graphql/graphiql/tree/main/packages/graphql-language-service-cli + +## Architecture + +- `main/lib/language-services/manager.ts` + - Registers providers + - Tracks provider enable/disable state + - Produces workspace snapshots for the Problems view +- `main/lib/language-services/diagnostics-store.ts` + - Holds normalized diagnostics per provider/file/workspace +- `main/lib/language-services/lsp/StdioJsonRpcClient.ts` + - Shared stdio JSON-RPC transport for LSP-based providers +- `main/lib/language-services/lsp/ExternalLspLanguageProvider.ts` + - Shared LSP provider implementation for stdio-based language servers +- `renderer/providers/LanguageServicesProvider` + - Syncs open editor documents to enabled providers +- `renderer/routes/_authenticated/settings/behavior/components/DiagnosticsSettings` + - Lets users toggle providers on or off + +## Adding a New Provider + +1. Implement `LanguageServiceProvider`. +2. Normalize diagnostics into `LanguageServiceDiagnostic`. +3. Register the provider in `LanguageServiceManager`. +4. Add a renderer-side language mapping in `LanguageServicesProvider`. +5. Add syntax highlighting support if needed in `detect-language.ts` and `loadLanguageSupport.ts`. +6. Extend the settings store/provider ID union if the provider should be user-toggleable. + +## Runtime Notes + +- TypeScript, Python, YAML, Dockerfile and GraphQL diagnostics are bundled from Node packages and launched with `ELECTRON_RUN_AS_NODE=1`. 
+- Go diagnostics require `gopls` to be available on the user's PATH. +- Rust diagnostics require `rust-analyzer` to be available on the user's PATH. diff --git a/apps/desktop/electron-builder.ts b/apps/desktop/electron-builder.ts index 2c33562eb2c..bcab6a2f253 100644 --- a/apps/desktop/electron-builder.ts +++ b/apps/desktop/electron-builder.ts @@ -69,6 +69,15 @@ const config: Configuration = { to: "resources/host-migrations", filter: ["**/*"], }, + // Standalone `superset-browser-mcp` binary produced by + // `bun build --compile`. Shipped with the app so users register it + // into Claude Code / Codex via one command with an absolute path + // and never need npm or a separate install step. + { + from: "../../packages/superset-browser-mcp/dist", + to: "resources/superset-browser-mcp", + filter: ["superset-browser-mcp", "superset-browser-mcp.exe"], + }, ], files: [ @@ -90,8 +99,13 @@ const config: Configuration = { // Rebuild native modules for Electron's Node.js version npmRebuild: true, - // macOS DMG installer + // macOS DMG + // NOTE: dmgbuild 1.2.0 は size = (sum(app files) + 128MB) を割り当てるが、 + // HFS+ のカタログ/ジャーナル overhead (≈150MB+) を無視するため、app が 1.8GB を超えると + // ditto が "No space left on device" で失敗し、最後にコピーされる Electron Framework + // バイナリ (167MB) が欠落した dmg が生成される。size を明示してバイパスする。 dmg: { + size: "4g", ...(existsSync(dmgBackgroundPath) ? 
{ background: dmgBackgroundPath } : {}), }, @@ -114,6 +128,9 @@ const config: Configuration = { // Required for macOS microphone permission prompt NSMicrophoneUsageDescription: "Superset needs microphone access so voice-enabled tools like Codex transcription can capture audio input.", + // Required for macOS camera permission prompt + NSCameraUsageDescription: + "Superset needs camera access so websites and tools running inside the app can capture video input.", // Required for macOS local network permission prompt NSLocalNetworkUsageDescription: "Superset needs access to your local network to discover and connect to development servers running on your network.", diff --git a/apps/desktop/electron.vite.config.ts b/apps/desktop/electron.vite.config.ts index 5f073935c45..3637b123c23 100644 --- a/apps/desktop/electron.vite.config.ts +++ b/apps/desktop/electron.vite.config.ts @@ -34,14 +34,17 @@ const workspaceDependencies = Object.keys(dependencies).filter((dependency) => ); // Sentry plugin for uploading sourcemaps (only in CI with auth token) -const sentryPlugin = process.env.SENTRY_AUTH_TOKEN - ? sentryVitePlugin({ - org: "superset-sh", - project: "desktop", - authToken: process.env.SENTRY_AUTH_TOKEN, - release: { name: version }, - }) - : null; +const sentryPlugin = + process.env.SENTRY_AUTH_TOKEN && + process.env.SENTRY_ORG && + process.env.SENTRY_PROJECT + ? sentryVitePlugin({ + org: process.env.SENTRY_ORG, + project: process.env.SENTRY_PROJECT, + authToken: process.env.SENTRY_AUTH_TOKEN, + release: { name: version }, + }) + : null; export default defineConfig({ main: { @@ -107,10 +110,17 @@ export default defineConfig({ "terminal-host": resolve("src/main/terminal-host/index.ts"), // PTY subprocess - spawned by terminal-host for each terminal "pty-subprocess": resolve("src/main/terminal-host/pty-subprocess.ts"), + // TODO agent daemon - owns `claude -p` children so autonomous + // TODO sessions survive app restarts (issue #237). 
+ "todo-daemon": resolve("src/main/todo-daemon/index.ts"), // Worker-thread entrypoint for heavy git/status computations "git-task-worker": resolve("src/main/git-task-worker.ts"), // Workspace service - local HTTP/tRPC server per org "host-service": resolve("src/main/host-service/index.ts"), + // VS Code extension host worker - one per active workspace + "extension-host-worker": resolve( + "src/main/extension-host-worker/index.ts", + ), }, output: { dir: resolve(devPath, "main"), @@ -154,6 +164,7 @@ export default defineConfig({ rollupOptions: { input: { index: resolve("src/preload/index.ts"), + "webview-compat": resolve("src/preload/webview-compat.ts"), }, }, }, @@ -179,6 +190,10 @@ export default defineConfig({ process.env.NEXT_PUBLIC_MARKETING_URL, "https://superset.sh", ), + "process.env.NEXT_PUBLIC_OPEN_LINK_URL": defineEnv( + process.env.NEXT_PUBLIC_OPEN_LINK_URL, + "https://superset.m4gu.dev", + ), "process.env.NEXT_PUBLIC_ELECTRIC_URL": defineEnv( process.env.NEXT_PUBLIC_ELECTRIC_URL, "https://electric-proxy.avi-6ac.workers.dev", diff --git a/apps/desktop/package.json b/apps/desktop/package.json index add4db3c52b..876228de500 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -18,13 +18,14 @@ "start": "electron-vite preview", "generate:icons": "bun run scripts/generate-file-icons.ts", "predev": "cross-env NODE_ENV=development bun run clean:dev && bun run generate:icons && cross-env NODE_ENV=development bun run scripts/clean-launch-services.ts && cross-env NODE_ENV=development bun run scripts/patch-dev-protocol.ts", - "dev": "cross-env NODE_ENV=development NODE_OPTIONS=--max-old-space-size=8192 electron-vite dev --watch", - "compile:app": "cross-env NODE_OPTIONS=--max-old-space-size=8192 electron-vite build", + "dev": "cross-env NODE_ENV=development NODE_OPTIONS=--max-old-space-size=12288 electron-vite dev --watch", + "compile:app": "cross-env NODE_OPTIONS=--max-old-space-size=12288 electron-vite build", "copy:native-modules": "bun run 
scripts/copy-native-modules.ts", "validate:native-runtime": "bun run scripts/validate-native-runtime.ts", - "prebuild": "bun run clean:dev && bun run generate:icons && bun run compile:app && bun run copy:native-modules && bun run validate:native-runtime", + "build:browser-mcp": "bun --cwd ../../packages/superset-browser-mcp run build:bin", + "prebuild": "bun run clean:dev && bun run generate:icons && bun run compile:app && bun run copy:native-modules && bun run validate:native-runtime && bun run build:browser-mcp", "build": "cross-env CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --publish never", - "prepackage": "bun run copy:native-modules && bun run validate:native-runtime", + "prepackage": "bun run copy:native-modules && bun run validate:native-runtime && bun run build:browser-mcp", "package": "electron-builder --config electron-builder.ts", "install:deps": "electron-builder install-app-deps", "release": "electron-builder --publish always", @@ -58,6 +59,7 @@ "@codemirror/lang-yaml": "^6.1.2", "@codemirror/language": "^6.12.2", "@codemirror/legacy-modes": "^6.5.2", + "@codemirror/merge": "^6.12.1", "@codemirror/search": "^6.6.0", "@codemirror/state": "^6.5.4", "@codemirror/theme-one-dark": "^6.1.3", @@ -72,7 +74,7 @@ "@hono/node-server": "^1.14.1", "@hookform/resolvers": "^5.2.2", "@lezer/highlight": "^1.2.3", - "@mastra/core": "1.26.0-alpha.3", + "@mastra/core": "1.25.0", "@parcel/watcher": "^2.5.6", "@pierre/diffs": "1.1.3", "@radix-ui/react-dialog": "^1.1.15", @@ -87,6 +89,7 @@ "@superset/host-service": "workspace:*", "@superset/local-db": "workspace:*", "@superset/macos-process-metrics": "workspace:*", + "@superset/macos-window-blur": "workspace:*", "@superset/panes": "workspace:*", "@superset/shared": "workspace:*", "@superset/trpc": "workspace:*", @@ -101,6 +104,7 @@ "@tanstack/react-router": "^1.147.3", "@tanstack/react-table": "^8.21.3", "@tanstack/react-virtual": "^3.13.18", + "@taplo/lib": "^0.5.0", "@tiptap/core": "^3.17.1", 
"@tiptap/extension-blockquote": "^3.17.1", "@tiptap/extension-bold": "^3.17.1", @@ -141,6 +145,7 @@ "@types/express": "^5.0.5", "@types/pidusage": "^2.0.5", "@vercel/blob": "^2.0.0", + "@vscode/ripgrep": "^1.15.9", "@xterm/addon-clipboard": "0.3.0-beta.197", "@xterm/addon-fit": "0.12.0-beta.197", "@xterm/addon-image": "0.10.0-beta.197", @@ -152,21 +157,32 @@ "@xterm/addon-webgl": "0.20.0-beta.196", "@xterm/headless": "6.1.0-beta.197", "@xterm/xterm": "6.1.0-beta.197", + "@xyflow/react": "^12.10.0", "ai": "^6.0.0", + "ansi_up": "^6.0.6", "better-auth": "1.6.5", "better-sqlite3": "12.6.2", "bindings": "^1.5.0", "bufferutil": "^4.1.0", "clsx": "^2.1.1", + "cron-parser": "^5.5.0", + "cronstrue": "^3.14.0", "culori": "^4.0.2", "date-fns": "^4.1.0", "default-shell": "^2.2.0", "detect-libc": "2.0.4", "dexie": "^4.4.2", + "diff": "^7.0.0", "dnd-core": "^16.0.1", + "dockerfile-ast": "0.7.1", + "dockerfile-language-server-nodejs": "^0.15.0", + "dockerfile-language-service": "0.16.1", + "dockerfile-utils": "0.16.3", "dotenv": "^17.3.1", "drizzle-orm": "0.45.2", "electron-updater": "^6.8.3", + "elkjs": "^0.11.1", + "exceljs": "^4.4.0", "execa": "^9.6.0", "express": "^5.1.0", "fast-glob": "^3.3.3", @@ -174,32 +190,41 @@ "framer-motion": "^12.23.26", "friendly-words": "^1.3.1", "fuse.js": "^7.1.0", + "graphql": "^16.13.2", + "graphql-language-service-cli": "^3.5.0", "highlight.js": "^11.11.1", + "html-to-image": "^1.11.13", "http-proxy": "^1.18.1", + "https-proxy-agent": "^7.0.2", "idb-keyval": "^6.2.2", "jose": "^6.1.3", + "js-yaml": "^4.1.1", + "jszip": "^3.10.1", "libsql": "0.5.22", "line-column-path": "^3.0.0", "lodash": "^4.17.21", "lowdb": "^7.0.1", "lowlight": "^3.3.0", "lucide-react": "^0.563.0", - "mastracode": "0.15.0-alpha.3", + "mastracode": "0.14.0", "nanoid": "^5.1.6", "node-addon-api": "^7.1.0", "node-pty": "1.1.0", "os-locale": "^6.0.2", + "pg": "8.20.0", "pidtree": "^0.6.0", "pidusage": "^4.0.1", "posthog-js": "1.310.1", "posthog-node": "^5.24.7", 
"prebuild-install": "^7.1.1", + "proxy-from-env": "^1.1.0", + "pyright": "^1.1.408", "react": "19.2.0", "react-dnd": "^16.0.1", "react-dnd-html5-backend": "^16.0.1", "react-dom": "19.2.0", "react-hook-form": "^7.71.1", - "react-hotkeys-hook": "^5.2.4", + "react-hotkeys-hook": "5.2.4", "react-icons": "^5.5.0", "react-markdown": "^10.1.0", "react-mosaic-component": "^6.1.1", @@ -208,6 +233,7 @@ "rehype-raw": "^7.0.0", "rehype-sanitize": "^6.0.0", "remark-gfm": "^4.0.1", + "remark-github-blockquote-alert": "^2.1.0", "semver": "^7.7.3", "shell-env": "^4.0.3", "shell-quote": "^1.8.3", @@ -225,6 +251,15 @@ "use-resize-observer": "^9.1.0", "utf-8-validate": "^6.0.6", "uuid": "^14.0.0", + "vscode-css-languageservice": "^6.3.10", + "vscode-html-languageservice": "^5.6.2", + "vscode-json-languageservice": "^5.7.2", + "vscode-langservers-extracted": "^4.10.0", + "vscode-languageserver-textdocument": "^1.0.12", + "vscode-languageserver-types": "3.17.3", + "ws": "^8.18.0", + "yaml-language-server": "^1.21.0", + "yauzl": "^2.9.2", "zod": "^4.3.5", "zustand": "^5.0.8" }, @@ -237,14 +272,18 @@ "@types/better-sqlite3": "^7.6.13", "@types/bun": "^1.2.17", "@types/culori": "^4.0.1", + "@types/diff": "^6.0.0", "@types/http-proxy": "^1.17.17", + "@types/js-yaml": "^4.0.9", "@types/lodash": "^4.17.20", "@types/node": "^24.9.1", + "@types/pg": "8.15.6", "@types/react": "~19.2.2", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", "@types/semver": "^7.7.1", "@types/shell-quote": "^1.7.5", + "@types/ws": "^8.18.1", "@vitejs/plugin-react": "^5.0.1", "code-inspector-plugin": "^1.2.2", "cross-env": "^10.0.0", diff --git a/apps/desktop/plans/20260405-quit-tray-lifecycle.md b/apps/desktop/plans/20260405-quit-tray-lifecycle.md index ec31c8ba75d..6b527810611 100644 --- a/apps/desktop/plans/20260405-quit-tray-lifecycle.md +++ b/apps/desktop/plans/20260405-quit-tray-lifecycle.md @@ -1,6 +1,6 @@ # macOS Quit & Tray Lifecycle -## Decision (2026-04-05) +## Decision 
(2026-04-05)

All quit paths fully exit the app. No background-to-tray behavior for now.

@@ -8,9 +8,11 @@ The tray exists while the app is running and provides host-service management an

### What shipped

-- **Removed macOS background-to-tray block** from `before-quit` (#3205). The old block prevented quit and kept tray alive when `hasActiveInstances()` was true, but left the dock icon visible (confusing UX).
-- **Updater fix**: `installUpdate()` calls `quitAndInstall()` then `exitImmediately()`, bypassing the quit protocol entirely. The old `prepareQuit("release")` approach coupled the updater to the quit lifecycle unnecessarily.
-- **Hardened `before-quit` cleanup**: host-service cleanup wrapped in try/catch so `app.exit(0)` always runs. Without this, an exception in cleanup would skip `app.exit(0)`, and the macOS window close handler (`event.preventDefault()` + `hide()`, added in #3157) would block the quit.
+- **Lifecycle intents** (`exit_release`, `exit_stop`, `restart`) replace the overloaded `QuitMode` (`"release" | "stop"`). Explicit intents skip the confirm-on-quit dialog and route directly to the exit path.
+- **Updater fix**: `installUpdate()` uses `prepareIntent("exit_release")` so `before-quit` skips the confirm dialog and exits cleanly. The old `prepareQuit("release")` was intercepted by the macOS background-to-tray block when services were active, preventing updates from installing.
+- **Tray menu rename**: "Quit (Keep Services Running)" is now "Quit Superset" for clarity.
+- **Restart consolidation**: `restartApp` tRPC endpoint uses `requestExit("restart")` instead of manual `app.relaunch()` + `app.exit(0)`.
+- **Removed macOS background-to-tray block** from `before-quit`. The old block prevented quit and kept tray alive when `hasActiveInstances()` was true, but left the dock icon visible (confusing UX). 
### What was deferred @@ -29,19 +31,21 @@ Background-to-tray on macOS (Cmd+Q destroys windows but keeps tray alive) is the | Dock right-click Quit | Same | | App menu Quit | Same | | Window close (red-X / Cmd+W) | macOS: hide window (standard behavior). Non-macOS: close window, then app quits. | -| Tray "Quit (Keep Services Running)" | `requestQuit("release")` — release services, full exit | -| Tray "Quit & Stop Services" | `requestQuit("stop")` — stop services, full exit | +| Tray "Quit Superset" | `requestExit("exit_release")` — release services, full exit | +| Tray "Quit & Stop Services" | `requestExit("exit_stop")` — stop services, full exit | | Tray host-service "Stop" | Stops individual service, app stays running | -| Update install | `quitAndInstall()` + `exitImmediately()` — bypasses quit protocol | +| Settings "Restart App" | `requestExit("restart")` — release services, relaunch, exit | +| Update install | `prepareIntent("exit_release")` + `quitAndInstall()` — full exit, updater handles install | ### Host-service lifecycle on quit -- **Release** (default): services keep running as detached processes. On next app launch, they are re-adopted via manifest files. -- **Stop** (`requestQuit("stop")`): services are terminated via `SIGTERM`. +- **Release** (`exit_release`, implicit quit): services keep running as detached processes. On next app launch, they are re-adopted via manifest files. +- **Stop** (`exit_stop`): services are terminated via `SIGTERM`. 
### Key files -- `src/main/index.ts` — `before-quit` handler, `requestQuit`, `exitImmediately` +- `src/main/lib/lifecycle.ts` — lifecycle intent model +- `src/main/index.ts` — `before-quit` handler - `src/main/windows/main.ts` — window close behavior - `src/main/lib/tray/index.ts` — tray menu and actions - `src/main/lib/auto-updater.ts` — update install flow diff --git a/apps/desktop/plans/20260416-todo-schedule.md b/apps/desktop/plans/20260416-todo-schedule.md new file mode 100644 index 00000000000..8fbf7afc181 --- /dev/null +++ b/apps/desktop/plans/20260416-todo-schedule.md @@ -0,0 +1,186 @@ +# TODO Agent スケジュール実行 実装計画 + +既存の TODO 自律エージェントに **cron ライクな定期実行** 機能を追加する。 +ユーザーはスケジュールを登録しておくと、指定時刻にそのプロンプトで TODO セッションが自動作成・キュー投入される。 + +## 目的 + +- 「毎日 9:00 にデプロイ」「1時間ごとに lint 走らせる」のような + 定型的な AI タスクを手動トリガーなしで実行できる。 +- 既存の TODO 作成フロー・実行エンジン (supervisor) をそのまま再利用し、 + スケジュール層は薄く、単純にトリガー役に徹する。 +- フォーク限定機能。`apps/desktop` 内に閉じる。 + +## 前提 (ユーザー決定事項) + +1. 発火通知は **トースト** +2. cron 式の直接入力ではなく **UX 重視のビルダー UI** (プリセット + カスタム) +3. 前回実行中の発火時の挙動 (skip / queue) は **スケジュール毎にユーザーが選択** +4. 
UI は TodoManager **内に統合** (独立モーダルにはしない) + +## 非目的 (v1) + +- missed firing の補完 (閉じてた間の発火を後で実行): 初回は **skip + 通知のみ** +- タイムゾーン切替: ローカル TZ 固定 +- スケジュール間の依存関係 / 順序制御 +- スケジュール共有 (エクスポート/インポート) + +## アーキテクチャ + +``` +Renderer Main process +──────── ──────────── +TodoManager TodoScheduler (singleton) + └─ SchedulesSection ├─ tick (setInterval every 30s) + ├─ ScheduleList ├─ compute nextRunAt for each schedule + └─ ScheduleEditor │ and compare to now + └─ ScheduleFrequencyPicker ├─ on fire: + │ ├─ check overlap mode + │ ├─ call TodoSupervisor.createFromSchedule() + │ └─ emit `schedule.fired` event + └─ scheduleStore (SQLite) + +trpc todoAgent.schedule.* ─► scheduleStore CRUD +trpc todoAgent.schedule.onFire ─► observable + (for toast in renderer) +``` + +## DB schema (`packages/local-db/src/schema/todo-schedules.ts`) + +```ts +todo_schedules { + id: text pk + workspaceId: text (FK workspaces, cascade) + projectId: text (FK projects, set null) + name: text not null -- 表示名 + enabled: int bool not null dflt 1 + + -- スケジュール定義 (UI ビルダー経由で設定) + frequency: text enum("hourly","daily","weekly","monthly","custom") not null + minute: int -- 0-59 (hourly+) + hour: int -- 0-23 (daily+) + weekday: int -- 0-6, 0=Sun (weekly) + monthday: int -- 1-31 (monthly) + cronExpr: text -- frequency=custom のときのみ + + -- 発火時に作成する TODO の雛形 + title: text not null + description: text not null + goal: text + verifyCommand: text + maxIterations: int not null dflt 10 + maxWallClockSec: int not null dflt 1800 + customSystemPrompt:text + + overlapMode: text enum("skip","queue") not null dflt "skip" + + lastRunAt: int + lastRunSessionId: text + nextRunAt: int -- 予測値。tick で使う + createdAt: int + updatedAt: int +} + +index (workspaceId), (enabled, nextRunAt) +``` + +マイグレーション生成: +```sh +cd packages/local-db +bun run generate --name=add_todo_schedules +``` + +## スケジューラ (`apps/desktop/src/main/todo-agent/scheduler.ts`) + +- `setInterval(tick, 30_000)` でポーリング +- tick: 有効なスケジュールを DB から取得、`nextRunAt <= now` なものを発火 +- 
発火: + 1. overlap チェック (skip なら、同 scheduleId の未完了セッションがあればスキップ) + 2. `TodoSupervisor.createFromSchedule(schedule)` で TODO セッションを作成 + 3. `session-store` に挿入 → 既存のキュー機構に乗る + 4. `lastRunAt = now`, `lastRunSessionId = ...`, `nextRunAt = computeNext(schedule, now)` を保存 + 5. `schedule.fired` イベントを emit → UI 側のトースト購読に届く +- `nextRunAt` 計算は frequency enum に応じた専用ヘルパ (custom のみ cron パース) +- cron パースは `cron-parser` (小さい・7日以上前のリリース確認必須) + +## UI (統合: TodoManager 内 Schedules セクション) + +配置: TodoManager の左サイドバーにタブ「Tasks / Schedules」を追加。 + +### ScheduleList +- 行: enable トグル / 名前 / 次回実行時刻 / 最終実行結果 / ... メニュー (edit / delete) +- 空状態: "+ New Schedule" ボタン + +### ScheduleEditor (ダイアログ) + +ビルダー UI: +1. **名前**: テキスト +2. **ワークスペース**: select (existing workspaces) +3. **プロンプト**: 既存の TodoComposer と同じ UI (description / goal / verify / preset / attachments) +4. **頻度ビルダー**: + - Hourly: `毎時 :MM 分` + - Daily: `毎日 HH:MM` + - Weekly: `毎週[曜日] HH:MM` (曜日チップ複数選択) + - Monthly: `毎月 DD 日 HH:MM` + - Custom: raw cron 式入力 + `cronstrue` でヒューマン表示 +5. **重複時の挙動**: radio `前回が走っていたらスキップ` / `キューに追加` +6. **有効/無効**: トグル + +次回実行予定をプレビュー表示 (`cronstrue` の locale=ja-JP). 
+ +## トースト + +`electronTrpc.todoAgent.schedule.onFire.useSubscription` を TodoManager or +グローバルプロバイダで購読し、以下を表示: + +- 成功: `📅 {name} を実行しました` (→ セッション詳細への遷移リンク) +- skip: `⏭️ {name} の実行をスキップしました (前回が実行中)` + +## 実装ファイル一覧 (新規のみ) + +### Backend +- `packages/local-db/src/schema/todo-schedules.ts` +- `packages/local-db/drizzle/00XX_add_todo_schedules.sql` (自動生成) +- `packages/local-db/src/schema/index.ts` (追記) +- `apps/desktop/src/main/todo-agent/scheduler.ts` +- `apps/desktop/src/main/todo-agent/schedule-store.ts` +- `apps/desktop/src/main/todo-agent/trpc-router.ts` (nested `schedule` router 追記) +- `apps/desktop/src/main/todo-agent/supervisor.ts` (`createFromSchedule` 追加) + +### Frontend +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/SchedulesSection/SchedulesSection.tsx` +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/SchedulesSection/components/ScheduleList/ScheduleList.tsx` +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/SchedulesSection/components/ScheduleEditor/ScheduleEditor.tsx` +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/SchedulesSection/components/FrequencyBuilder/FrequencyBuilder.tsx` +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/SchedulesSection/hooks/useScheduleFireToast/useScheduleFireToast.ts` +- `apps/desktop/src/renderer/features/todo-agent/TodoManager/TodoManager.tsx` (タブ追加・1箇所変更) + +### 依存パッケージ追加 +- `cron-parser` (main side; for custom cron parsing + next-fire computation) +- `cronstrue` (renderer; human-readable cron) +両方とも 7日以上前のリリースが存在する安定 lib。 + +## テスト計画 + +- `scheduler.test.ts`: frequency → nextRunAt 計算, overlap 判定 +- `schedule-store.test.ts`: CRUD / inserted の shape +- `FrequencyBuilder` の簡易描画テスト (optional) + +## ロールアウト + +1. DB schema + migration +2. scheduler + store + tRPC +3. TodoManager UI 統合 +4. トースト +5. 型チェック + lint + 既存 todo セッションテストに干渉しないことを確認 +6. 
PR → セルフレビュー → マージ + +## リスクと対策 + +| リスク | 対策 | +|------|------| +| アプリ閉じてる間の発火が消える | v1 は諦める。UI に「アプリ起動中のみ」明記 | +| 破壊的コマンドの暴走 | `verifyCommand` は既存通り任意。ユーザー責任。初期はドキュメントで警告 | +| スケジュールの重複暴発 | overlapMode=skip デフォルト + DB index で pending 検出 | +| Claude API 料金の想定外消費 | maxIterations / maxWallClockSec は既存制約をそのまま使う | +| タイムゾーンずれ | ローカル TZ 固定。将来 tz 列追加で拡張可能 | diff --git a/apps/desktop/plans/20260417-todo-agent-remote-control.md b/apps/desktop/plans/20260417-todo-agent-remote-control.md new file mode 100644 index 00000000000..a09f171e6cb --- /dev/null +++ b/apps/desktop/plans/20260417-todo-agent-remote-control.md @@ -0,0 +1,124 @@ +# TODO Agent Remote Control 統合 計画 + +## 背景 + +Claude Code CLI は v2.1.51 で `claude remote-control` / `claude --remote-control` / スラッシュコマンド `/remote-control` を提供し、ローカルで走っているセッションを claude.ai/code や Claude iOS/Android アプリから閲覧・操作できるようになった。 + +TODO Agent は現在 `claude -p --output-format stream-json` をサブプロセスで起動して stdout の NDJSON を parse するヘッドレス方式で動いている。これは Remote Control と互換性がない (`-p` は Ink TUI を持たず、interactive 端末 UI を要求する `/remote-control` を受けられない)。 + +本 PR は PTY + JSONL tail ベースの代替エンジンを feature flag 付きで追加し、Remote Control を opt-in で使えるようにする。 + +## 検証済み事実 (手元 POC 完了) + +- interactive `claude --permission-mode bypassPermissions --settings ''` で Stop / UserPromptSubmit / PreToolUse / PostToolUse / SessionStart hook を inline 注入可能 +- `~/.claude/projects//.jsonl` は interactive モードでも書き込まれる。spawn 後 3 秒以内に生成される +- interactive モードの `--session-id ` は JSONL ファイル名を制御**しない** (別 UUID が内部生成される)。`fs.watch` で project dir の新規ファイルを自セッションとして同定する必要がある +- JSONL event type: `system` / `user` / `user(tool_result)` / `assistant(thinking|text|tool_use)` / `attachment` / `permission-mode` / `file-history-snapshot` / `queue-operation` / `last-prompt` +- PTY への bracketed paste (`\x1b[200~...\x1b[201~\r`) で prompt 投入成功 +- `/remote-control\r` で stdout に `https://claude.ai/code/session_...` が表示される +- mid-session で追加プロンプトを送信可能 + +## アーキテクチャ + +### 選択肢比較 + +| 案 | Remote Control | Live 
stream | コスト | 採否 | +|----|----------------|-------------|--------|------| +| A. 現状 `-p` | 不可 | ○ | 0 | 部分採用 (既定・非 RC 系は当面これ) | +| B. Agent SDK | 不可 (API key 必須) | ○ | 大 | 却下 | +| C. PTY + JSONL tail | ○ | △ (per-token なし / whole message) | 中 | **本 PR で採用** | +| D. Dual process | △ (競合) | ○ | 小 | 却下 (会話競合リスク) | + +### 案 C の構成 + +``` +[daemon] + ├── supervisor-engine.ts (従来 -p エンジン / 既定) + │ └── runClaudeTurn() : stream-json stdout parse + │ + └── pty-turn-runner.ts (新規 PTY エンジン / opt-in) + └── runClaudeTurnPty() + ├── node-pty spawn + │ claude --permission-mode bypassPermissions + │ --settings '' + │ [--model ...] [--effort ...] + │ [--resume ] + │ + ├── fs.watch(~/.claude/projects//) + │ → 新規 .jsonl を自セッションとして同定 + │ + ├── chokidar 相当の poll + offset tracking + │ → assistant / user(tool_result) / assistant(tool_use) を + │ supervisor-engine と同じ TodoStreamEvent 形に変換 + │ + ├── Stop hook 発火 (Unix/tmp ファイル経由) で turn 終了検知 + │ + ├── Remote Control 有効時のみ PTY stdin に `/remote-control\r` + │ → PTY stdout を ANSI strip 後 `https://claude.ai/code/session_...` + │ を抽出してセッションに保存 + │ + └── bracketed paste で prompt 投入 / 次ターンも同じ PTY 再利用 ... 
+ ではなく、既存 supervisor の iteration ループに合わせて + **1 ターン 1 プロセス** とし、次 iteration は + `--resume ` で再 spawn する +``` + +**重要な設計判断: 1 ターン 1 プロセス** +既存 `supervisor-engine.ts` は iteration ごとに `claude -p` を spawn → exit する。PTY 版もこのライフサイクルに合わせ、1 ターンごとに PTY プロセスを起こして Stop hook で終了させる。これで: + +- 既存 `runSession` ループを変更せず `runClaudeTurn` を差し替えるだけで済む +- ScheduleWakeup の既存処理 (waiting 状態 → 別プロセスで resume) がそのまま動く +- Intervention (追加メッセージ) も既存の queue → 次 iteration 投入フローで動く +- 長命プロセスのリソース管理問題を回避 + +### Feature flag + +- 環境変数 `TODO_ENGINE=pty` で PTY エンジンに切り替え (既定: headless) +- セッション単位の `remote_control_enabled` フラグは UI チェックボックスで opt-in + - PTY エンジン + `remote_control_enabled=true` の AND 条件で Remote Control 発動 + - チェックボックスは `TODO_ENGINE=pty` が無効なときは disabled + +## DB schema 変更 + +`todo_sessions` に 1 列追加: + +```sql +ALTER TABLE todo_sessions ADD COLUMN remote_control_enabled INTEGER DEFAULT 0; +``` + +- `remote_control_session_url` は **永続化しない**。daemon 再起動で RC セッションは切れるため、URL は in-memory + stream event のみで表現 +- Remote Control 状態は stream event で live-stream に流す + +## UI 変更 + +- `TodoModal`: 「Remote Control を有効化」チェックボックス追加 (PTY mode 時のみ有効) +- `ScheduleEditorDialog`: 同様のチェックボックス追加 +- `TodoManager` live stream: RC 接続中バッジ + URL リンクを stream events から読んで表示 + +## 実装順序 + +1. plan.md 追加 (本文書) +2. DB schema: `remote_control_enabled` 列追加 +3. PTY turn runner 本体 (`pty-turn-runner.ts`) +4. supervisor-engine 側の feature flag 分岐 +5. StartRequest / tRPC 入出力 に RC フィールド追加 +6. TodoModal / ScheduleEditorDialog UI +7. live stream バッジ表示 +8. lint / typecheck / 自己レビュー +9. 
commit / push / PR + +## フォローアップ (後続 PR) + +- dogfood 後 `-p` エンジン削除 +- per-token streaming (JSONL には text_delta が無いので別経路を検討) +- mid-session メッセージ送信 UI (`queueIntervention` 拡張) +- Remote Control URL の永続化 + セッション再接続導線 +- 並列起動時の race 対策強化 +- Electron パッケージでの node-pty ネイティブ rebuild 確認 (既に terminal-host で使用中) + +## 前提条件 + +- `claude auth login` 済 (claude.ai OAuth) +- Claude Code v2.1.51+ +- Pro/Max/Team/Enterprise プラン +- Team/Enterprise は admin が Remote Control トグルを有効化済 diff --git a/apps/desktop/plans/20260418-aivis-dictionary-and-usage.md b/apps/desktop/plans/20260418-aivis-dictionary-and-usage.md new file mode 100644 index 00000000000..45d83ff025e --- /dev/null +++ b/apps/desktop/plans/20260418-aivis-dictionary-and-usage.md @@ -0,0 +1,339 @@ +# Aivis: ユーザー辞書 & 日別使用量ダッシュボード + +作成日: 2026-04-18 +関連 Issue: #286 (Aivis 音声読み上げ機能の拡張) +関連 PR: #287 (Aivis 通知のベース実装) + +## 背景 + +PR #287 で Aivis API による音声読み上げ通知を実装済み。次のステップとして、開発者向けに刺さりそうな以下 2 機能を追加する。 + +1. **ユーザー辞書**: ブランチ名・プロジェクト名・英略語など特殊な読み方をする単語をカスタム登録し、音声合成に反映する +2. 
**日別使用量ダッシュボード**: Aivis の API 使用状況 (リクエスト数・文字数・クレジット消費) を日別に可視化する + +どちらも Settings > Notifications > Aivis セクション内に配置する。 + +## API 前提 + +### ユーザー辞書 + +| Method | Path | 用途 | +|---|---|---| +| GET | `/v1/user-dictionaries` | 辞書一覧 (uuid, name, description, word_count, created_at, updated_at) | +| GET | `/v1/user-dictionaries/{uuid}` | 辞書詳細 (word_properties 配列まで含む) | +| PUT | `/v1/user-dictionaries/{uuid}` | 辞書を丸ごと置き換え (作成・更新共通) | +| DELETE | `/v1/user-dictionaries/{uuid}` | 辞書削除 | +| POST | `/v1/user-dictionaries/{uuid}/import?override=true\|false` | AivisSpeech 互換 JSON を取り込み | +| GET | `/v1/user-dictionaries/{uuid}/export` | AivisSpeech 互換 JSON を出力 | + +**WordProperty フィールド**: + +```ts +{ + uuid: string; // クライアント側で UUID v4 を採番 + surface: string[]; // 表記 (配列) 例: ["Superset", "superset"] + normalized_surface: string[] | null; + pronunciation: string[]; // カタカナ読み 例: ["スーパーセット"] + accent_type: number[]; // アクセント核位置 (0 始まり)。不明なら 0 + word_type: "PROPER_NOUN" | "COMMON_NOUN" | "VERB" | "ADJECTIVE" | "SUFFIX"; // デフォルト PROPER_NOUN + priority: number; // 0-10、デフォルト 5 +} +``` + +**合成時の指定**: `POST /v1/tts/synthesize` のボディに `user_dictionary_uuid` (単一 uuid、オプション) を載せる。**複数辞書指定は不可**。 + +### 使用量サマリ + +| Method | Path | +|---|---| +| GET | `/v1/payment/usage-summaries?start_date=YYYY-MM-DD&end_date=YYYY-MM-DD` | + +**レスポンス (1 行 = 1 時間 × 1 API キー)**: + +```ts +{ + summaries: Array<{ + api_key_id: string; + api_key_name: string; + summary_date: string; // YYYY-MM-DD + summary_hour: number; // 0-23 + request_count: number; + character_count: number; + credit_consumed: number; + }>; +} +``` + +日別グラフ化はクライアント側で `summary_date` ごとに集計する。API キーが複数ある場合もまとめて合算 (オプションでキー別表示)。 + +## 実装方針 + +### 全体構成 + +- Aivis API 呼び出しは main プロセスに集約 (API キーを renderer に流さない) + - 新規: `apps/desktop/src/main/lib/aivis/client.ts` — 汎用の authorized fetch ラッパー + - 新規: `apps/desktop/src/main/lib/aivis/dictionary.ts` — 辞書 CRUD + - 新規: `apps/desktop/src/main/lib/aivis/usage.ts` — 使用量取得 + 日別集計 +- TRPC `aivis` サブルーターを新設: 
`apps/desktop/src/lib/trpc/routers/aivis/index.ts`
+  - 既存の `settings.testAivisPlayback` もこちらに移植 (移植は別PRでも可)
+  - すべて API キーは main で DB から読み取るため、renderer からは引数不要
+- 既存の settings に `aivisUserDictionaryUuid` (text) を追加し、合成時に載せる
+
+### ステップ 1: API クライアント
+
+`apps/desktop/src/main/lib/aivis/client.ts`:
+
+```ts
+const BASE = "https://api.aivis-project.com";
+
+function readApiKey(): string | null {
+  const row = localDb.select().from(settings).get();
+  return row?.aivisApiKey || null;
+}
+
+export async function aivisFetch(
+  path: string,
+  init: RequestInit & { query?: Record<string, string | undefined> } = {},
+): Promise<Response> {
+  const key = readApiKey();
+  if (!key) throw new Error("Aivis API key is not configured");
+  const url = new URL(path, BASE);
+  for (const [k, v] of Object.entries(init.query ?? {})) {
+    if (v !== undefined) url.searchParams.set(k, v);
+  }
+  const res = await fetch(url, {
+    ...init,
+    headers: {
+      Authorization: `Bearer ${key}`,
+      Accept: "application/json",
+      ...(init.body && !(init.body instanceof FormData)
+        ? 
{ "Content-Type": "application/json" } + : {}), + ...init.headers, + }, + }); + if (!res.ok) { + const body = await res.text().catch(() => ""); + throw new Error(`Aivis ${res.status}: ${body.slice(0, 300)}`); + } + return res; +} +``` + +### ステップ 2: DB スキーマ + +`packages/local-db/src/schema/schema.ts` の settings テーブルに追加: + +- `aivisUserDictionaryUuid: text("aivis_user_dictionary_uuid")` — 合成時に適用する辞書 UUID + +マイグレーション自動生成: `bun run generate --name="add_aivis_user_dictionary_uuid"` + +### ステップ 3: 辞書 TRPC ルーター + +`apps/desktop/src/lib/trpc/routers/aivis/dictionary.ts`: + +```ts +export const aivisDictionaryRouter = router({ + list: publicProcedure.query(async () => { + const res = await aivisFetch("/v1/user-dictionaries"); + const json = await res.json(); + return json.user_dictionaries as Array<{ + uuid: string; + name: string; + description: string; + word_count: number; + updated_at: string; + }>; + }), + + get: publicProcedure.input(z.object({ uuid: z.string().uuid() })) + .query(async ({ input }) => { + const res = await aivisFetch(`/v1/user-dictionaries/${input.uuid}`); + return await res.json(); + }), + + upsert: publicProcedure.input(z.object({ + uuid: z.string().uuid(), // 新規作成時は crypto.randomUUID() + name: z.string().max(100), + description: z.string().max(500).default(""), + words: z.array(z.object({ + uuid: z.string().uuid(), + surface: z.array(z.string().min(1)).min(1), + pronunciation: z.array(z.string().min(1)).min(1), + accent_type: z.array(z.number().int().min(0)), + word_type: z.enum(["PROPER_NOUN","COMMON_NOUN","VERB","ADJECTIVE","SUFFIX"]).default("PROPER_NOUN"), + priority: z.number().int().min(0).max(10).default(5), + })), + })).mutation(async ({ input }) => { + await aivisFetch(`/v1/user-dictionaries/${input.uuid}`, { + method: "PUT", + body: JSON.stringify({ + name: input.name, + description: input.description, + word_properties: input.words, + }), + }); + return { success: true }; + }), + + delete: publicProcedure.input(z.object({ uuid: 
z.string().uuid() }))
+    .mutation(async ({ input }) => {
+      await aivisFetch(`/v1/user-dictionaries/${input.uuid}`, { method: "DELETE" });
+      return { success: true };
+    }),
+
+  export: publicProcedure.input(z.object({ uuid: z.string().uuid() }))
+    .query(async ({ input }) => {
+      const res = await aivisFetch(`/v1/user-dictionaries/${input.uuid}/export`);
+      return await res.json(); // AivisSpeech 互換 Object
+    }),
+
+  import: publicProcedure.input(z.object({
+    uuid: z.string().uuid(),
+    data: z.record(z.string(), z.unknown()), // AivisSpeech 互換
+    override: z.boolean().default(false),
+  })).mutation(async ({ input }) => {
+    await aivisFetch(`/v1/user-dictionaries/${input.uuid}/import`, {
+      method: "POST",
+      query: { override: String(input.override) },
+      body: JSON.stringify(input.data),
+    });
+    return { success: true };
+  }),
+});
+```
+
+### ステップ 4: 合成呼び出しに辞書 UUID を付与
+
+`apps/desktop/src/main/lib/notifications/aivis-tts.ts` を拡張:
+
+- `readAivisSettings()` に `userDictionaryUuid` を追加
+- `synthesize()` が受け取り、リクエストボディに `user_dictionary_uuid` を積む
+- `playAivisTts` オプションにも `userDictionaryUuid?: string` を追加
+
+### ステップ 5: 使用量サマリルーター
+
+`apps/desktop/src/lib/trpc/routers/aivis/usage.ts`:
+
+```ts
+export const aivisUsageRouter = router({
+  daily: publicProcedure.input(z.object({
+    startDate: z.string().regex(/^\d{4}-\d{2}-\d{2}$/),
+    endDate: z.string().regex(/^\d{4}-\d{2}-\d{2}$/),
+  })).query(async ({ input }) => {
+    const res = await aivisFetch("/v1/payment/usage-summaries", {
+      query: { start_date: input.startDate, end_date: input.endDate },
+    });
+    const { summaries } = await res.json();
+
+    // summary_date で集計
+    const byDate = new Map<string, {
+      date: string;
+      requestCount: number; characterCount: number; creditConsumed: number;
+      byApiKey: Record<string, { name: string; requestCount: number; characterCount: number; creditConsumed: number }>;
+    }>();
+    for (const s of summaries) {
+      const entry = byDate.get(s.summary_date) ?? 
{ + date: s.summary_date, + requestCount: 0, characterCount: 0, creditConsumed: 0, + byApiKey: {}, + }; + entry.requestCount += s.request_count; + entry.characterCount += s.character_count; + entry.creditConsumed += s.credit_consumed; + const bucket = entry.byApiKey[s.api_key_id] ?? { + name: s.api_key_name, requestCount: 0, characterCount: 0, creditConsumed: 0, + }; + bucket.requestCount += s.request_count; + bucket.characterCount += s.character_count; + bucket.creditConsumed += s.credit_consumed; + entry.byApiKey[s.api_key_id] = bucket; + byDate.set(s.summary_date, entry); + } + + return { + days: [...byDate.values()].sort((a, b) => a.date.localeCompare(b.date)), + total: { + requestCount: [...byDate.values()].reduce((a, b) => a + b.requestCount, 0), + characterCount: [...byDate.values()].reduce((a, b) => a + b.characterCount, 0), + creditConsumed: [...byDate.values()].reduce((a, b) => a + b.creditConsumed, 0), + }, + }; + }), +}); +``` + +### ステップ 6: 辞書 UI + +配置: `apps/desktop/src/renderer/routes/_authenticated/settings/ringtones/components/AivisDictionary/` + +- 辞書リスト (name / word_count / 更新日) +- 辞書の新規作成 (name 入力 → crypto.randomUUID でローカル採番して upsert) +- 辞書選択 (ラジオ) → settings.aivisUserDictionaryUuid に保存 +- 辞書編集モーダル: + - 表形式で `surface / pronunciation / accent_type / priority` を編集 + - 行追加 / 削除 / 並べ替え + - accent_type は数値スピナー、word_type は select、priority は 0-10 スライダ +- エクスポート (ダウンロード) / インポート (JSON ファイル選択) +- 削除ボタン (確認ダイアログ) + +**バリデーション注意点**: +- `surface` / `pronunciation` は空配列不可、空文字の要素不可 +- `accent_type` が空なら 0 を自動補完 +- pronunciation はカタカナのみに制限 (正規表現 `/^[\u30A0-\u30FFー]+$/`) + +### ステップ 7: 使用量ダッシュボード UI + +配置: `apps/desktop/src/renderer/routes/_authenticated/settings/ringtones/components/AivisUsage/` + +- 期間選択 (直近 7 日 / 30 日 / カスタム) +- 合計バー (リクエスト / 文字数 / クレジット) +- 日別棒グラフ (シンプルに CSS で作るか、既に入っていれば recharts を利用) + - y 軸: クレジット消費 (既定)。トグルで request_count / character_count に切替 +- 日別テーブル (日付 / リクエスト / 文字数 / クレジット) +- API キーが複数ある場合のみ「キー別内訳」アコーディオン + +**依存追加の判断**: +- 既に 
`recharts` / `visx` / `chart.js` 等が入っていれば流用 +- なければ最初はシンプルな CSS バーで十分 (過剰依存を避ける) + +## タスク分解 + +1. local-db: `aivisUserDictionaryUuid` 追加 + migration 生成 +2. main: `aivis/client.ts` 追加 (authorized fetch) +3. main: `aivis/dictionary.ts` (ラッパ)、`aivis/usage.ts` (集計ロジック) 追加 +4. TRPC: `aivis` サブルーター (dictionary + usage) を登録 +5. main: `aivis-tts.ts` に `user_dictionary_uuid` を付与するよう拡張 +6. settings: `getAivisSettings`/`setAivisSettings` に `userDictionaryUuid` を追加 +7. UI: AivisDictionary (一覧 + 編集モーダル + import/export) +8. UI: AivisUsage (期間選択 + グラフ + テーブル) +9. 既存 AivisSettings に辞書セレクタを追加 +10. Settings 検索に辞書/使用量アイテムを追加 +11. typecheck / lint / 実機動作確認 + +## リスク・論点 + +- **API レート/クレジット消費**: 使用量グラフの描画で `usage-summaries` を頻繁に叩かない (フォーカス時のみ or 手動更新)。キャッシュ TTL 5 分程度。 +- **エラー表示**: Aivis 401 (キー無効) 時に UI 全体を無効化するか、辞書/使用量だけエラー表示にするか。後者推奨。 +- **辞書 UI の複雑度**: アクセント型の編集は UX が難しい。MVP では数値入力で十分。将来的に実音声プレビュー + アクセントビジュアライザを検討。 +- **複数 API キー**: Aivis 側でキー切替・失効管理が可能。今は単一キー前提だが、将来的には複数キーに拡張できる設計にする (`api_key_id` で集計済み)。 +- **辞書の単語上限**: API ドキュメントに明示上限なし。数千行になるケースを想定し、テーブル UI は仮想スクロールを検討。 + +## 段階リリース案 + +- **Phase 1** (この PR): 辞書 CRUD (最小限の UI) + 合成時適用 +- **Phase 2** (続く PR): 使用量ダッシュボード +- **Phase 3** (余力): 辞書エディタの UX 強化 (音声プレビュー、アクセントビジュアル) + +Phase 1 と 2 は独立しているため、同時に PR を分けて進めても良い。 + +## 完了条件 + +- [ ] 辞書を作成・編集・削除できる +- [ ] 作成した辞書を通知音声合成に適用できる (固有名詞が期待通り読まれる) +- [ ] AivisSpeech 互換 JSON の import/export が動作 +- [ ] 日別使用量を直近 7 日 / 30 日で表示できる +- [ ] API キー未設定時は適切な誘導が出る +- [ ] typecheck / lint / 既存テスト がすべて緑 diff --git a/apps/desktop/plans/20260423-1226-v2-pane-persistence-across-workspace-switch.md b/apps/desktop/plans/20260423-1226-v2-pane-persistence-across-workspace-switch.md deleted file mode 100644 index 60c73dea91e..00000000000 --- a/apps/desktop/plans/20260423-1226-v2-pane-persistence-across-workspace-switch.md +++ /dev/null @@ -1,130 +0,0 @@ -# v2 pane persistence across workspace switch - -## Context - -Switching v2 workspaces unmounts the entire `` subtree -(`layout.tsx:79` uses 
`key={`${workspace.id}:${hostUrl}`}`). Every pane React
-component for the outgoing workspace is torn down and recreated for the
-incoming one. Load-bearing long-lived state (xterm instance + WebSocket,
-webview guest process, CodeMirror `EditorView`) must live OUTSIDE the
-remounting subtree to survive. This note captures the root cause for each
-pane kind and the fix pattern so we don't have to re-derive it.
-
-## Shared root cause
-
-The `key` on `WorkspaceTrpcProvider` is load-bearing — it exists
-(commit `57557f806`) to prevent crashes from hook calls bleeding across
-trpc clients during transitions. We cannot remove it. Any pane that wants
-to survive workspace switches must:
-
-1. Hold its long-lived state in a module-level registry singleton.
-2. Own a DOM node (or native handle) parented *outside* the React
-   workspace subtree (body-level `<div>
` is the simplest). -3. Let the React component be a thin placeholder that only drives - position/visibility of the registry-owned node. - -Think "VSCode `TerminalInstance` + `setVisible`" or the existing -`browserRuntimeRegistry` root-container pattern. - -## Terminal — fixed in PR #3687 - -Was broken: `registry.attach()` fused DOM attach with WebSocket open and was -gated on `ensureSession`. The wrapper was `wrapper.remove()`'d on every -React unmount, so workspace switch was visible detach + reattach. The -`ensureSession` gate also added tRPC latency on warm returns, and opened -the WS against a nonexistent session on cold mount → "Session not found". - -Fixed by: -- Park wrapper in a hidden body-level `#v2-terminal-parking` div on - detach instead of `.remove()`. -- Split `attach` into `mount` (sync DOM) and `connect` (called only after - `ensureSession` resolves). -- Narrow `TerminalPane` effect deps to `[terminalId]`; read `workspaceId` - and `websocketUrl` through refs. `websocketUrl` changes go through a - separate `registry.reconnect` that no-ops on a cold transport. - -Refs: `terminal-runtime.ts`, `terminal-runtime-registry.ts`, -`TerminalPane.tsx`. - -## Browser — fixed - -### Symptom - -Switching workspaces destroyed the browser webview (and the guest page -along with it) instead of preserving state across the switch. - -### Root cause - -Confirmed via instrumentation: `browserRuntimeRegistry.destroy` was -being called on workspace switch with a stack rooted in React commit. -The only caller was `usePaneRegistry.tsx`'s `onRemoved` wiring: - -```ts -onRemoved: (pane) => browserRuntimeRegistry.destroy(pane.id), -``` - -`onRemoved` comes from `packages/panes/.../Workspace.tsx`, which diffs -`previousPanesRef` against `current` in a `useEffect` and calls -`registry[kind].onRemoved` for any id that disappeared. The diff lives -inside a single Workspace component instance. 
Under ideal conditions — -the v2 layout's `key={`${workspace.id}:${hostUrl}`}` remounts on every -switch — this diff should never observe cross-workspace "removal" -because each workspace has its own Workspace component. - -But the remount isn't always prompt: layout.tsx's `useLiveQuery` can -return stale WS-A data for a tick while `page.tsx`'s query has already -flipped to WS-B. During that tick the `key` hasn't changed yet, so the -existing `WorkspaceContent` stays mounted, `useV2WorkspacePaneLayout` -calls `store.replaceState(WS-B panes)` on the *same* store instance, -and the Panes library's diff correctly observes "the browser pane from -WS-A is gone now" → fires `onRemoved` → destroys the webview. By the -time the user returns to WS-A, the entry is gone; `attach()` runs the -`createEntry()` cold path and the webview is recreated with its -`initialUrl`, losing state. - -The terminal never hit this because terminal destruction goes through -`useGlobalTerminalLifecycle`, which sweeps against *all* workspaces' -persisted `paneLayout` rows and only destroys ids that are provably -absent everywhere. Cross-workspace "removal" isn't a real removal from -that sweep's perspective. - -### Fix - -Mirrored the terminal pattern: added `useGlobalBrowserLifecycle` under -`_authenticated/components/GlobalBrowserLifecycle/`, mounted it next to -`` in `_authenticated/layout.tsx`, and -removed the `onRemoved` wiring from `usePaneRegistry.tsx`. The new hook -extracts browser `pane.id`s from every workspace's `paneLayout`, diffs -against the previous set, and schedules `browserRuntimeRegistry.destroy` -on a 500 ms grace delay (same timing as the terminal sweep) so -cross-workspace pane moves don't trigger premature teardown. 
- -Hypothesis #1 (placeholder-rect race) and #3 (webview recycling on -`visibility: hidden`) from the original list did not reproduce once #2 -was fixed — the instrumentation showed `updateLayout` applying correct -non-zero rects and the webview surviving detach as long as no `destroy` -call fired. Left in place as known-good; will revisit if a future -regression points at either. - -## File / Code editor — lower priority - -File-viewer panes use CodeMirror `EditorView` created in a `useEffect([])` -inside `CodeEditor.tsx:153-171`, disposed on unmount. Workspace switch -therefore loses: undo history, cursor position, scroll position, any -unsaved viewport scroll. Not reported yet but predictable; users may -complain after terminal/browser are solid. - -Fix pattern is identical: a module-level `codeEditorRegistry` keyed by -`${workspaceId}:${filePath}` (or pane id, if file viewer panes are -per-workspace) that owns the `EditorView` and its host div, with a body- -level root container. `CodeEditor` becomes a placeholder that registers -a rect. - -Defer until it's a reported problem — the migration is mechanical but -the value is speculative and CodeMirror re-init is already fast. - -## Not in scope - -- v1 terminal. Sunset per CLAUDE.md / memory. -- v2 chat pane. Currently a "temporarily disabled" stub. -- Diff / comment / devtools. No long-lived state. 
diff --git a/apps/desktop/plans/todo-agent-plan.md b/apps/desktop/plans/todo-agent-plan.md new file mode 100644 index 00000000000..213e7d6de0a --- /dev/null +++ b/apps/desktop/plans/todo-agent-plan.md @@ -0,0 +1,285 @@ +# TODO 自律エージェント 実装計画 + +フォーク内限定の機能。ワークスペースの `Run` ボタンの左側にボタンを追加し、 +ユーザーが定義した目標が検証可能な形で達成されるまで、無人で実行を続ける +自律的な Claude Code ループを起動できるようにする。実行中のワーカー端末は +常にライブで可視化され、ユーザーは必要に応じて介入できる。 + +## 目的 + +- ユーザーは (1) 何をしてほしいか と (2) 明確なゴール + (受け入れ判定コマンド)を入力するだけでよく、その後は追加の指示なしで + システムが Claude Code を完了まで動かす。 +- ライブ可視性: 実行中ワーカーは実際の PTY であり、既存の + `TerminalPane` コンポーネントで描画されるため、誰でも監視したり + 直接入力したりできる。 +- 信頼性: 完了判定は決定的な verify コマンドの終了コードで行い、 + LLM の自己申告には依存しない。 +- 逐次実行: 同時にアクティブなのは 1 タスクのみとし、それ以外はキューに入れる。 +- upstream とのマージ容易性: 新規コードはすべて新しいファイル / ディレクトリに + 置き、既存ファイルへの変更は追記のみ、かつ 1 行変更を 3 箇所に限定する。 + +## 非目的(v1) + +- タスクの並列実行。 +- Cloud / Modal 上のサンドボックス実行 + (ローカル worktree のみを対象とする)。 +- セッションをまたいだ LLM 判定。最終判定はシェルの verify コマンドとする。 +- PR の自動作成。(v2 で対応予定) + +## アーキテクチャ + +``` +Renderer Main process +──────── ──────────── +TodoButton (PresetsBar) TodoSupervisor (singleton) + └─ TodoModal ──► trpc todo.create ──────► createSession() + ├─ writes .superset/todo//goal.md + ├─ inserts DB row (queued) + └─ returns sessionId +TodoPanel enqueue / runQueue loop + ├─ trpc todo.subscribeState ◄─────────── state observable (per session) + ├─ embeds ◄──────── (paneId assigned by renderer) + ├─ Abort / Pause buttons ├─ spawnWorker(paneId) via + └─ Intervene input ──► trpc todo.sendKey ─┘ existing terminal.write + ├─ subscribe data:${paneId} + │ (idle timer + log capture) + ├─ runVerify() (child_process) + └─ update state / next iteration +``` + +Supervisor は **メインプロセス上で動く純粋な TypeScript** であり、 +2 つ目の Claude Code インスタンスではない。これが最も重要な単純化ポイントで、 +LLM 間通信は存在せず、「管理」役は決定論的な TS コードで担い、 +創造的な処理はすべてワーカー側に集約する。 + +## 実行ループ + +各セッションは状態遷移ごとに DB へ永続化する: + +``` +queued → preparing → running → verifying → done + │ │ + │ └──► running (fail, under budget) + │ │ + │ └──► escalated (futility) + └──► aborted +``` 
+ +各イテレーションの流れ: + +1. Supervisor はワーカー用 PTY ペインの存在を確認する + (初回は renderer が `tabs.addTerminalPane` で作成し、 + `todo.attachPane` で `paneId` を登録する)。 +2. `goal.md`、現在の `state.json`、およびリトライ時は verify 失敗ログの末尾を + もとにプロンプトを組み立てる。 +3. Supervisor はそのプロンプトを `terminal.write` 経由で PTY に書き込む。 + ワーカー側では、対話モードの `claude` が既にペイン内で待機している。 +4. Supervisor は node-pty emitter の `data:${paneId}` イベントを購読する + (メインプロセスから + `getWorkspaceRuntimeRegistry().getDefault().terminal` で直接参照可能)。 + チャンクを受け取るたびに 5 秒のアイドルタイマーをリセットする。 +5. ストリームがしきい値時間だけアイドル状態になり、かつ + ターン完了ヒューリスティックを満たしたら、Supervisor は worktree 上で + `verifyCommand` を独立した child process として実行し、 + 終了コードとログ末尾を取得する。 +6. `exit 0` の場合は状態を `done` にし、判定結果を記録して通知を送る。 +7. 非 0 の場合は futile 判定 + (同じ failing test が N 回連続、または同じ diff が 2 回連続)を行い、 + 次イテレーションへ進むか、`escalated` にするかを決める。 +8. 状態が変わるたびに Supervisor は `sessionId` をキーにした + `EventEmitter` へ通知し、それを trpc subscription 側が購読する。 + +### Stop hook ではなく idle 検知を使う理由 + +Stop hook の方がきれいだが、ワーカー起動コマンドへ +`--settings ` を差し込む必要があり、これはインストール済みの +Claude Code バイナリがそのフラグをサポートしているかに依存する。v1 では、 +Claude Code CLI の内部仕様と結合しないように idle 検知を使う。 +Stop hook 連携は v2 の拡張項目として、後述の `Unresolved` に記載する。 + +### 予算と futile ガード + +- `maxIterations`(デフォルト 10) +- `maxWallClockSec`(デフォルト 1800) +- `maxTurnsPerIteration` は強制しない + (対話モードのため)。wall-clock と iteration 上限を優先する。 +- Futility: verify が同じテスト名で 3 イテレーション連続失敗する、 + あるいは worktree diff が前回イテレーションと完全一致する場合。 +- 予算超過または futility 検知時は `escalated` とし、セッションは永続化しつつ、 + ワーカーペインはそのまま残してユーザーが引き継げるようにする。 + +## 介入 UX + +- PTY は通常のターミナルなので、`TerminalPane` を開いているユーザーは + 直接入力できる。Supervisor が入力を専有することはない。 +- `TodoPanel` でもワンクリックの `Send` 入力欄を提供し、 + ユーザーがターミナルにフォーカスを移さなくても + `terminal.write({paneId, data})` を実行できるようにする。 +- `Pause` ボタンはイテレーションスケジューラを停止するだけで、 + ワーカーの現在のターン自体は継続する。kill はしない。 +- `Abort` は PTY に `Ctrl-C`(`\x03`)を 2 回送ったうえで、 + 状態を `aborted` にする。 + +## UI サーフェス + +- **`TodoButton`**: `PresetsBar.tsx:488` の `WorkspaceRunButton` 左に置く + コンパクトなボタン。キュー中 + 実行中セッション数の小さなカウンターを表示する。 + クリックで `New TODO`、`Open 
panel`、最近のセッションを含むドロップダウンを開く。 +- **`TodoModal`**: フォーム項目は以下。 + - タイトル(必須) + - 説明(必須、複数行) + - ゴール / 受け入れ条件(必須、複数行) + - Verify コマンド(デフォルト: `bun test`) + - 予算: 最大イテレーション数(デフォルト 10)、 + wall-clock 分数(デフォルト 30) +- **`TodoPanel`**: 右側ドロワー。左にセッション一覧、右に詳細。 + 詳細にはゴール、フェーズ、イテレーション、残り予算、最新の判定結果、 + ワーカー用に埋め込まれた ``、および + Pause / Abort / Send コントロールを表示する。 + +## フォーク衝突面 + +### 新規ファイル(衝突リスクなし) + +``` +apps/desktop/plans/todo-agent-plan.md (this file) +apps/desktop/src/main/todo-agent/ + index.ts barrel + types.ts shared types + zod schemas + supervisor.ts singleton loop driver + session-store.ts in-memory session map + EventEmitter fan-out + worker-pty.ts thin wrapper around terminal.write / onData + verify-runner.ts child_process exec of verifyCommand + futility-detector.ts repeat-failure / diff-stall detection + prompt-builder.ts composes the claude prompt per iteration + trpc-router.ts tRPC router factory (createTodoAgentRouter) +packages/local-db/src/schema/todo-sessions.ts (new table) +apps/desktop/src/renderer/features/todo-agent/ + TodoButton/TodoButton.tsx + TodoButton/index.ts + TodoModal/TodoModal.tsx + TodoModal/index.ts + TodoPanel/TodoPanel.tsx + TodoPanel/index.ts + hooks/useTodoSession.ts + hooks/useTodoQueue.ts +``` + +### 変更する既存ファイル(最小限、追記のみ) + +1. `packages/local-db/src/schema/index.ts` および `schema.ts` + 1 行追加: `export * from "./todo-sessions";` +2. `apps/desktop/src/lib/trpc/routers/index.ts` + import 1 行 + router object に 1 行追加: + `todoAgent: createTodoAgentRouter()`. +3. 
`apps/desktop/src/renderer/screens/main/components/WorkspaceView/ContentView/components/PresetsBar/PresetsBar.tsx` + 既存の `` 描画直前の 1 行 + (488 行目付近)に + `` + を追加。 + +この 3 つの変更はいずれも 1 行単位で孤立しているため、 +upstream 側で多少の変更があっても衝突しにくい。 + +## データモデル + +```ts +// packages/local-db/src/schema/todo-sessions.ts (SQLite) +export const todoSessions = pgTable("todo_sessions", { + id: uuid().primaryKey().defaultRandom(), + organizationId: uuid("organization_id").notNull().references(() => organizations.id), + projectId: uuid("project_id").references(() => projects.id), + workspaceId: uuid("workspace_id").notNull().references(() => workspaces.id), + createdByUserId: uuid("created_by_user_id").references(() => users.id), + + title: text().notNull(), + description: text().notNull(), + goal: text().notNull(), + verifyCommand: text("verify_command").notNull(), + + // Budget + maxIterations: integer("max_iterations").notNull().default(10), + maxWallClockSec: integer("max_wall_clock_sec").notNull().default(1800), + + // State + status: text().notNull().default("queued"), // queued|preparing|running|verifying|done|failed|escalated|aborted + phase: text(), + iteration: integer().notNull().default(0), + attachedPaneId: text("attached_pane_id"), + + // Verdict + verdictPassed: boolean("verdict_passed"), + verdictReason: text("verdict_reason"), + verdictFailingTest: text("verdict_failing_test"), + + // Artifacts + artifactPath: text("artifact_path").notNull(), // .superset/todo// + + createdAt: timestamp("created_at").notNull().defaultNow(), + updatedAt: timestamp("updated_at").notNull().defaultNow(), + startedAt: timestamp("started_at"), + completedAt: timestamp("completed_at"), +}, (table) => [ + index("todo_sessions_workspace_idx").on(table.workspaceId), + index("todo_sessions_status_idx").on(table.status), +]); + +export type InsertTodoSession = typeof todoSessions.$inferInsert; +export type SelectTodoSession = typeof todoSessions.$inferSelect; +``` + +ユーザー側で `bunx drizzle-kit generate 
--name="add_todo_sessions"` を実行する。 +リポジトリポリシーに従い、こちらでは実行しない。 + +## tRPC サーフェス + +``` +todoAgent.create(input) → { sessionId } +todoAgent.list(workspaceId) → SelectTodoSession[] +todoAgent.get(sessionId) → SelectTodoSession +todoAgent.attachPane(sessionId, paneId) → void +todoAgent.pause(sessionId) → void +todoAgent.resume(sessionId) → void +todoAgent.abort(sessionId) → void +todoAgent.sendInput(sessionId, data) → void (passthrough to terminal.write) +todoAgent.subscribeState(sessionId) → observable +``` + +すべての subscription は `observable` ヘルパーを使い、 +`apps/desktop/AGENTS.md` に記載された trpc-electron の制約を満たす。 + +## 段階的な提供 + +**Phase 1(このブランチ)** +- DB テーブル + migration +- 単一タスク対応・キューなし・idle 検知ループ・child_process による verify を備えた + Supervisor の骨組み +- ライブペイン埋め込み付きの `TodoButton` + `TodoModal` + `TodoPanel` +- Pause / Abort / Send Input + +**Phase 2** +- キュー + (複数セッションの逐次実行) +- Futility 検知の強化 +- `--settings` を使った Stop hook 連携の任意対応 +- Issue URL の自動取り込み + (`gh issue view` → ゴールの事前入力) + +**Phase 3** +- `done` 時の PR draft 自動作成 +- 通知 +- 追加 worktree による並列実行 + +## 未解決事項 + +- インストール済みの Claude Code バイナリが、セッション単位の hook 注入用に + `--settings ` フラグをサポートしているかどうか。 + Phase 2 の確認項目とする。 +- `verifyCommand` をワーカー PTY 内で実行するべきか、 + 別 child process で実行するべきか。現行案では、 + verify 出力でユーザーに見えるターミナルを汚さないため、 + 別 child process を使う。verify 出力をインラインで見たい要望が強ければ再検討する。 +- クラウドワークスペース実行時に、artifact + (`.superset/todo//`)をどこへ永続化するか。 + v1 ではローカル限定のため対象外。 diff --git a/apps/desktop/runtime-dependencies.ts b/apps/desktop/runtime-dependencies.ts index dc4c7712bfb..7410c8a4377 100644 --- a/apps/desktop/runtime-dependencies.ts +++ b/apps/desktop/runtime-dependencies.ts @@ -49,6 +49,12 @@ const externalizedRuntimeModules: ExternalizedRuntimeModule[] = [ packagedCopies: [copyWholeModule("@superset/macos-process-metrics")], asarUnpackGlobs: ["**/node_modules/@superset/macos-process-metrics/**/*"], }, + { + specifier: "@superset/macos-window-blur", + materialize: ["@superset/macos-window-blur"], + packagedCopies: 
[copyWholeModule("@superset/macos-window-blur")], + asarUnpackGlobs: ["**/node_modules/@superset/macos-window-blur/**/*"], + }, { specifier: "@ast-grep/napi", materialize: ["@ast-grep/napi"], @@ -73,6 +79,15 @@ const externalizedRuntimeModules: ExternalizedRuntimeModule[] = [ ], asarUnpackGlobs: ["**/node_modules/@libsql/**/*"], }, + { + // Ships the ripgrep binary with the app so VSCode-style .gitignore-aware + // Quick Open / Files tab search works for every user regardless of + // whether they have rg on their system PATH. + specifier: "@vscode/ripgrep", + materialize: ["@vscode/ripgrep"], + packagedCopies: [copyWholeModule("@vscode/ripgrep")], + asarUnpackGlobs: ["**/node_modules/@vscode/ripgrep/**/*"], + }, ]; const packagedSupportModules = [ @@ -83,6 +98,17 @@ const packagedSupportModules = [ copyWholeModule("is-extglob"), copyWholeModule("picomatch"), copyWholeModule("node-addon-api"), + copyWholeModule("typescript"), + copyWholeModule("yaml-language-server"), + copyWholeModule("dockerfile-language-server-nodejs"), + copyWholeModule("graphql-language-service-cli"), + copyWholeModule("graphql"), + copyWholeModule("pyright"), + copyWholeModule("vscode-css-languageservice"), + copyWholeModule("vscode-html-languageservice"), + copyWholeModule("vscode-json-languageservice"), + copyWholeModule("vscode-languageserver-textdocument"), + copyWholeModule("vscode-langservers-extracted"), ]; export const mainExternalizedDependencies = [ @@ -115,4 +141,15 @@ export const requiredMaterializedNodeModules = [ "is-extglob", "picomatch", "node-addon-api", + "typescript", + "yaml-language-server", + "dockerfile-language-server-nodejs", + "graphql-language-service-cli", + "graphql", + "pyright", + "vscode-css-languageservice", + "vscode-html-languageservice", + "vscode-json-languageservice", + "vscode-languageserver-textdocument", + "vscode-langservers-extracted", ]; diff --git a/apps/desktop/scripts/copy-native-modules.ts b/apps/desktop/scripts/copy-native-modules.ts index 
3b34f3ecc75..c730c9de9ac 100644 --- a/apps/desktop/scripts/copy-native-modules.ts +++ b/apps/desktop/scripts/copy-native-modules.ts @@ -24,8 +24,8 @@ import { realpathSync, rmSync, } from "node:fs"; -import { dirname, join } from "node:path"; -import { satisfies } from "semver"; +import { dirname, join, relative } from "node:path"; +import { maxSatisfying, satisfies } from "semver"; import { requiredMaterializedNodeModules } from "../runtime-dependencies"; // Target architecture for cross-compilation. When set, platform-specific @@ -53,15 +53,40 @@ function getBunStoreDir(nodeModulesDir: string): string { function findBunStoreFolderName( bunStoreDir: string, moduleName: string, - version: string, + versionRange: string, ): string | null { if (!existsSync(bunStoreDir)) return null; const entries = readdirSync(bunStoreDir); const modulePrefix = `${moduleName.replace("/", "+")}@`; - const exactPrefix = `${modulePrefix}${version}`; - const exactMatch = entries.find((entry) => entry.startsWith(exactPrefix)); - if (exactMatch) return exactMatch; - return entries.find((entry) => entry.startsWith(modulePrefix)) ?? null; + const matchingEntries = entries.filter((entry) => + entry.startsWith(modulePrefix), + ); + + const extractVersion = (entry: string): string | null => { + const remainder = entry.slice(modulePrefix.length); + const candidate = remainder.split("_")[0]; + return candidate.length > 0 ? candidate : null; + }; + + const versions = matchingEntries + .map((entry) => ({ entry, version: extractVersion(entry) })) + .filter( + (item): item is { entry: string; version: string } => + item.version !== null, + ); + + const exactMatch = versions.find((item) => item.version === versionRange); + if (exactMatch) return exactMatch.entry; + + const bestMatch = maxSatisfying( + versions.map((item) => item.version), + versionRange, + ); + if (!bestMatch) { + return null; + } + + return versions.find((item) => item.version === bestMatch)?.entry ?? 
null; } function copyModuleIfSymlink( @@ -142,6 +167,7 @@ function copyExactModuleVersion( moduleName, ); if (existsSync(sourcePath)) { + rmSync(destPath, { recursive: true, force: true }); mkdirSync(dirname(destPath), { recursive: true }); cpSync(sourcePath, destPath, { recursive: true }); console.log(` Copied ${moduleName}@${version} to: ${destPath}`); @@ -163,43 +189,179 @@ function copyExactModuleVersion( return false; } +function resolveDependencySource( + moduleName: string, + versionRange: string, +): { + sourceModuleName: string; + sourceVersionRange: string; +} { + if (!versionRange.startsWith("npm:")) { + return { + sourceModuleName: moduleName, + sourceVersionRange: versionRange, + }; + } + + const aliasSpec = versionRange.slice(4); + const match = aliasSpec.match(/^((?:@[^/]+\/)?[^@]+)@(.+)$/); + if (!match) { + return { + sourceModuleName: moduleName, + sourceVersionRange: versionRange, + }; + } + + return { + sourceModuleName: match[1], + sourceVersionRange: match[2], + }; +} + function copyDependencyForPackage( nodeModulesDir: string, parentModuleName: string, dependencyName: string, dependencyRange: string, required: boolean, -): void { + options?: { + preferNested?: boolean; + }, +): string | null { + const resolvedDependency = resolveDependencySource( + dependencyName, + dependencyRange, + ); const topLevelDependencyPath = join(nodeModulesDir, dependencyName); const topLevelVersion = readInstalledModuleVersion(topLevelDependencyPath); + const sourceTopLevelDependencyPath = join( + nodeModulesDir, + resolvedDependency.sourceModuleName, + ); + const sourceTopLevelVersion = readInstalledModuleVersion( + sourceTopLevelDependencyPath, + ); + const nestedDependencyPath = join( + nodeModulesDir, + parentModuleName, + "node_modules", + dependencyName, + ); + const preferNested = options?.preferNested ?? 
false; + + const materializeNestedFromSource = (sourcePath: string): string => { + rmSync(nestedDependencyPath, { recursive: true, force: true }); + mkdirSync(dirname(nestedDependencyPath), { recursive: true }); + cpSync(sourcePath, nestedDependencyPath, { + recursive: true, + }); + return nestedDependencyPath; + }; + const materializeTopLevelFromSource = (sourcePath: string): string => { + rmSync(topLevelDependencyPath, { recursive: true, force: true }); + mkdirSync(dirname(topLevelDependencyPath), { recursive: true }); + cpSync(sourcePath, topLevelDependencyPath, { + recursive: true, + }); + return topLevelDependencyPath; + }; + + if (preferNested) { + const nestedVersion = readInstalledModuleVersion(nestedDependencyPath); + if ( + nestedVersion && + satisfies(nestedVersion, resolvedDependency.sourceVersionRange) + ) { + const nestedStats = lstatSync(nestedDependencyPath); + if (nestedStats.isSymbolicLink()) { + const realPath = realpathSync(nestedDependencyPath); + rmSync(nestedDependencyPath); + cpSync(realPath, nestedDependencyPath, { + recursive: true, + }); + } + return nestedDependencyPath; + } - if (topLevelVersion && satisfies(topLevelVersion, dependencyRange)) { + if ( + topLevelVersion && + satisfies(topLevelVersion, resolvedDependency.sourceVersionRange) + ) { + // Do NOT materialize the top-level symlink; electron-builder would then + // traverse it and find its deps missing (they are placed nested here). + // Instead, dereference the symlink and copy directly to the nested path. + const realSource = lstatSync(topLevelDependencyPath).isSymbolicLink() + ? realpathSync(topLevelDependencyPath) + : topLevelDependencyPath; + return materializeNestedFromSource(realSource); + } + + if ( + resolvedDependency.sourceModuleName !== dependencyName && + sourceTopLevelVersion && + satisfies(sourceTopLevelVersion, resolvedDependency.sourceVersionRange) + ) { + const realSource = lstatSync( + sourceTopLevelDependencyPath, + ).isSymbolicLink() + ? 
realpathSync(sourceTopLevelDependencyPath) + : sourceTopLevelDependencyPath; + return materializeNestedFromSource(realSource); + } + + console.log( + ` ${dependencyName}: materializing nested copy for ${parentModuleName} (${topLevelVersion ?? sourceTopLevelVersion ?? "missing"} does not satisfy ${resolvedDependency.sourceVersionRange})`, + ); + copyExactModuleVersion( + nodeModulesDir, + resolvedDependency.sourceModuleName, + resolvedDependency.sourceVersionRange, + nestedDependencyPath, + required, + ); + return nestedDependencyPath; + } + + if ( + topLevelVersion && + satisfies(topLevelVersion, resolvedDependency.sourceVersionRange) + ) { copyModuleIfSymlink(nodeModulesDir, dependencyName, required); - return; + return topLevelDependencyPath; + } + + if ( + resolvedDependency.sourceModuleName !== dependencyName && + sourceTopLevelVersion && + satisfies(sourceTopLevelVersion, resolvedDependency.sourceVersionRange) + ) { + copyModuleIfSymlink( + nodeModulesDir, + resolvedDependency.sourceModuleName, + required, + ); + return materializeTopLevelFromSource(sourceTopLevelDependencyPath); } if (!topLevelVersion) { console.log( - ` ${dependencyName}: top-level version missing; materializing ${dependencyRange} at the workspace root`, + ` ${dependencyName}: top-level version missing; materializing ${resolvedDependency.sourceVersionRange} at the workspace root`, ); copyExactModuleVersion( nodeModulesDir, - dependencyName, - dependencyRange, + resolvedDependency.sourceModuleName, + resolvedDependency.sourceVersionRange, topLevelDependencyPath, required, ); - return; + return topLevelDependencyPath; } - const nestedDependencyPath = join( - nodeModulesDir, - parentModuleName, - "node_modules", - dependencyName, - ); const nestedVersion = readInstalledModuleVersion(nestedDependencyPath); - if (nestedVersion && satisfies(nestedVersion, dependencyRange)) { + if ( + nestedVersion && + satisfies(nestedVersion, resolvedDependency.sourceVersionRange) + ) { const nestedStats = 
lstatSync(nestedDependencyPath); if (nestedStats.isSymbolicLink()) { const realPath = realpathSync(nestedDependencyPath); @@ -208,20 +370,80 @@ function copyDependencyForPackage( recursive: true, }); } - return; + return nestedDependencyPath; } console.log( - ` ${dependencyName}: top-level version ${topLevelVersion ?? "missing"} does not satisfy ${dependencyRange}; materializing nested copy for ${parentModuleName}`, + ` ${dependencyName}: top-level version ${topLevelVersion ?? sourceTopLevelVersion ?? "missing"} does not satisfy ${resolvedDependency.sourceVersionRange}; materializing nested copy for ${parentModuleName}`, ); copyExactModuleVersion( nodeModulesDir, - dependencyName, - dependencyRange, + resolvedDependency.sourceModuleName, + resolvedDependency.sourceVersionRange, nestedDependencyPath, required, ); + + return nestedDependencyPath; +} + +function materializeProductionDependencyTree( + nodeModulesDir: string, + packageRelativePath: string, + seen: Set, +): void { + const packagePath = join(nodeModulesDir, packageRelativePath); + const packageJsonPath = join(packagePath, "package.json"); + + if (!existsSync(packageJsonPath)) { + return; + } + + type PackageJson = { + name?: string; + version?: string; + dependencies?: Record; + }; + + const packageJson = JSON.parse( + readFileSync(packageJsonPath, "utf8"), + ) as PackageJson; + const packageKey = packageJson.name + ? `${packageJson.name}@${packageJson.version ?? "0.0.0"}` + : realpathSync(packagePath); + + if (seen.has(packageKey)) { + return; + } + seen.add(packageKey); + + try { + for (const [dependencyName, dependencyRange] of Object.entries( + packageJson.dependencies ?? 
{}, + )) { + const dependencyPath = copyDependencyForPackage( + nodeModulesDir, + packageRelativePath, + dependencyName, + dependencyRange, + true, + { preferNested: true }, + ); + + if (!dependencyPath) { + continue; + } + + materializeProductionDependencyTree( + nodeModulesDir, + relative(nodeModulesDir, dependencyPath), + seen, + ); + } + } finally { + seen.delete(packageKey); + } } /** @@ -484,6 +706,33 @@ function prepareNativeModules() { copyModuleIfSymlink(nodeModulesDir, moduleName, true); } + console.log("\nMaterializing runtime dependency trees..."); + const runtimeDependencyRoots = [ + "yaml-language-server", + "dockerfile-language-server-nodejs", + "dockerfile-language-service", + "dockerfile-ast", + "dockerfile-utils", + "graphql-language-service-cli", + "pyright", + "vscode-css-languageservice", + "vscode-html-languageservice", + "vscode-json-languageservice", + "vscode-languageserver-textdocument", + "vscode-languageserver-types", + "vscode-langservers-extracted", + "strip-ansi", + ]; + const seenPackages = new Set(); + for (const moduleName of runtimeDependencyRoots) { + copyModuleIfSymlink(nodeModulesDir, moduleName, true); + materializeProductionDependencyTree( + nodeModulesDir, + moduleName, + seenPackages, + ); + } + console.log("\nPreparing ast-grep platform package..."); copyAstGrepPlatformPackages(nodeModulesDir); copyParcelWatcherPlatformPackages(nodeModulesDir); diff --git a/apps/desktop/scripts/sync-aivis-presets.ts b/apps/desktop/scripts/sync-aivis-presets.ts new file mode 100644 index 00000000000..548aa8ce97e --- /dev/null +++ b/apps/desktop/scripts/sync-aivis-presets.ts @@ -0,0 +1,204 @@ +#!/usr/bin/env bun +/** + * One-shot: resolve the built-in Aivis preset models against the public Aivis + * search API, download their icons into `renderer/assets/aivis-models/`, and + * regenerate `shared/aivis-presets-data.ts`. Run when adding/removing presets + * or when an icon URL changes upstream. 
+ * + * Usage: bun run scripts/sync-aivis-presets.ts + */ +import { mkdirSync, writeFileSync } from "node:fs"; +import { join } from "node:path"; + +const PRESET_NAMES = [ + "まい", + "花音", + "るな", + "桜音", + "中2", + "zonoko", + "コハク", + "まお", + "天深シノ", +]; + +const BASE = "https://api.aivis-project.com"; +const ASSET_DIR = join(__dirname, "../src/renderer/assets/aivis-models"); +const OUT_FILE = join( + __dirname, + "../src/renderer/routes/_authenticated/settings/ringtones/components/AivisSettings/components/ModelPresetTiles/preset-data.ts", +); + +interface AivmModel { + aivm_model_uuid: string; + name: string; + user?: { name?: string; handle?: string; icon_url?: string | null }; + speakers?: Array<{ + icon_url?: string | null; + styles?: Array<{ + voice_samples?: Array<{ audio_url?: string | null }>; + }>; + }>; +} + +function slug(s: string): string { + return s + .toLowerCase() + .replace(/[^a-z0-9\u3040-\u309f\u30a0-\u30ff\u4e00-\u9fff]+/g, "-") + .replace(/^-+|-+$/g, ""); +} + +function extFromContentType(ct: string): string { + if (ct.includes("png")) return "png"; + if (ct.includes("jpeg") || ct.includes("jpg")) return "jpg"; + if (ct.includes("webp")) return "webp"; + if (ct.includes("svg")) return "svg"; + return "png"; +} + +async function searchByName(name: string): Promise { + const url = new URL("/v1/aivm-models/search", BASE); + url.searchParams.set("keyword", name); + url.searchParams.set("limit", "5"); + const res = await fetch(url); + if (!res.ok) { + console.warn(` search failed for "${name}": ${res.status}`); + return null; + } + const json = (await res.json()) as { aivm_models?: AivmModel[] }; + const models = json.aivm_models ?? []; + const exact = models.find((m) => m.name === name); + const summary = exact ?? models[0] ?? null; + if (!summary) return null; + // Search results don't include voice_samples; fetch the model detail. 
+ const detailRes = await fetch( + new URL(`/v1/aivm-models/${summary.aivm_model_uuid}`, BASE), + ); + if (!detailRes.ok) return summary; + return (await detailRes.json()) as AivmModel; +} + +function audioExtFromContentType(ct: string): string { + if (ct.includes("mp3") || ct.includes("mpeg")) return "mp3"; + if (ct.includes("wav")) return "wav"; + if (ct.includes("ogg") || ct.includes("opus")) return "ogg"; + if (ct.includes("m4a") || ct.includes("aac") || ct.includes("mp4")) + return "m4a"; + if (ct.includes("flac")) return "flac"; + return "mp3"; +} + +async function downloadAudio( + url: string, + name: string, +): Promise { + const res = await fetch(url); + if (!res.ok) { + console.warn(` sample download failed for "${name}": ${res.status}`); + return null; + } + const ct = res.headers.get("content-type") ?? "audio/mpeg"; + const ext = audioExtFromContentType(ct); + const filename = `${slug(name)}.${ext}`; + const buf = Buffer.from(await res.arrayBuffer()); + mkdirSync(ASSET_DIR, { recursive: true }); + writeFileSync(join(ASSET_DIR, filename), buf); + return filename; +} + +async function download(url: string, name: string): Promise { + const res = await fetch(url); + if (!res.ok) { + console.warn(` icon download failed for "${name}": ${res.status}`); + return null; + } + const ct = res.headers.get("content-type") ?? 
"image/png"; + const ext = extFromContentType(ct); + const filename = `${slug(name)}.${ext}`; + const buf = Buffer.from(await res.arrayBuffer()); + mkdirSync(ASSET_DIR, { recursive: true }); + writeFileSync(join(ASSET_DIR, filename), buf); + return filename; +} + +async function main() { + mkdirSync(ASSET_DIR, { recursive: true }); + + const entries: Array<{ + name: string; + uuid: string; + iconFilename: string | null; + sampleFilename: string | null; + authorName: string | null; + }> = []; + + for (const name of PRESET_NAMES) { + console.log(`Resolving "${name}"…`); + const m = await searchByName(name); + if (!m) { + console.warn(` not found, skipping`); + continue; + } + const iconUrl = m.speakers?.[0]?.icon_url ?? m.user?.icon_url ?? null; + const sampleUrl = + m.speakers?.[0]?.styles?.[0]?.voice_samples?.[0]?.audio_url ?? null; + const iconFilename = iconUrl ? await download(iconUrl, name) : null; + const sampleFilename = sampleUrl + ? await downloadAudio(sampleUrl, name) + : null; + entries.push({ + name: m.name, + uuid: m.aivm_model_uuid, + iconFilename, + sampleFilename, + authorName: m.user?.name ?? null, + }); + console.log( + ` uuid=${m.aivm_model_uuid} icon=${iconFilename ?? "(none)"} sample=${sampleFilename ?? "(none)"}`, + ); + } + + const importLines: string[] = []; + const itemLines: string[] = []; + for (const [i, e] of entries.entries()) { + const iconSym = e.iconFilename ? `icon${i}` : null; + const sampleSym = e.sampleFilename ? `sample${i}` : null; + if (iconSym) { + importLines.push( + `import ${iconSym} from "renderer/assets/aivis-models/${e.iconFilename}";`, + ); + } + if (sampleSym) { + importLines.push( + `import ${sampleSym} from "renderer/assets/aivis-models/${e.sampleFilename}";`, + ); + } + itemLines.push( + `\t{ uuid: ${JSON.stringify(e.uuid)}, name: ${JSON.stringify(e.name)}, iconAsset: ${iconSym ?? "null"}, sampleAsset: ${sampleSym ?? 
"null"}, authorName: ${JSON.stringify(e.authorName)} },`, + ); + } + + const out = `// AUTO-GENERATED by scripts/sync-aivis-presets.ts +// Do not edit by hand. Re-run the script to refresh. +${importLines.join("\n")} + +export interface AivisPresetModel { + uuid: string; + name: string; + iconAsset: string | null; + sampleAsset: string | null; + authorName: string | null; +} + +export const AIVIS_PRESET_MODELS: AivisPresetModel[] = [ +${itemLines.join("\n")} +]; +`; + writeFileSync(OUT_FILE, out); + console.log(`\nWrote ${OUT_FILE} (${entries.length} entries)`); +} + +main().catch((err) => { + console.error(err); + process.exit(1); +}); diff --git a/apps/desktop/scripts/validate-native-runtime.ts b/apps/desktop/scripts/validate-native-runtime.ts index 5115b848f7f..6c6bdb2136e 100644 --- a/apps/desktop/scripts/validate-native-runtime.ts +++ b/apps/desktop/scripts/validate-native-runtime.ts @@ -19,6 +19,7 @@ import { const projectRoot = join(import.meta.dirname, ".."); const allowedBareRequirePackages = new Set([ "electron", + "source-map-support", ...mainExternalizedDependencies, ]); const builtinModuleSpecifiers = new Set([ diff --git a/apps/desktop/src/lib/ai/call-small-model.ts b/apps/desktop/src/lib/ai/call-small-model.ts new file mode 100644 index 00000000000..3cb67348f10 --- /dev/null +++ b/apps/desktop/src/lib/ai/call-small-model.ts @@ -0,0 +1,112 @@ +// FORK NOTE: upstream #3580 (#3580) replaced getSmallModelCandidates() with +// an async getSmallModel() that resolves a single model. This shim keeps the +// callSmallModel({ invoke }) interface that fork code (enhance-text.ts, +// git-operations.ts) expects, but now delegates to getSmallModel() instead of +// iterating a candidate list. Provider fallback and attempt tracking are +// simplified — getSmallModel() already handles the priority chain internally. 
+import { getSmallModel } from "@superset/chat/server/shared"; +import type { ProviderId, ProviderIssue } from "shared/ai/provider-status"; + +export type SmallModelCredentialKind = "api_key" | "oauth" | "env"; +export interface SmallModelCredential { + kind: SmallModelCredentialKind; + source?: string; +} + +export interface SmallModelAttempt { + providerId: ProviderId; + providerName: string; + credentialKind?: SmallModelCredentialKind; + credentialSource?: string; + issue?: ProviderIssue; + outcome: + | "missing-credentials" + | "expired-credentials" + | "unsupported-credentials" + | "empty-result" + | "failed" + | "succeeded"; + reason?: string; +} + +export interface SmallModelInvocationContext { + providerId: ProviderId; + providerName: string; + model: unknown; + credentials: SmallModelCredential; +} + +export async function callSmallModel({ + invoke, +}: { + invoke: ( + context: SmallModelInvocationContext, + ) => Promise; + providerOrder?: ProviderId[]; +}): Promise<{ + result: TResult | null; + attempts: SmallModelAttempt[]; +}> { + const model = await getSmallModel(); + + if (!model) { + return { + result: null, + attempts: [ + { + providerId: "anthropic", + providerName: "Anthropic", + outcome: "missing-credentials", + }, + { + providerId: "openai", + providerName: "OpenAI", + outcome: "missing-credentials", + }, + ], + }; + } + + try { + const result = await invoke({ + providerId: "anthropic", + providerName: "Anthropic", + model, + credentials: { kind: "api_key" }, + }); + if (result === null || result === undefined) { + return { + result: null, + attempts: [ + { + providerId: "anthropic", + providerName: "Anthropic", + outcome: "empty-result", + }, + ], + }; + } + return { + result, + attempts: [ + { + providerId: "anthropic", + providerName: "Anthropic", + outcome: "succeeded", + }, + ], + }; + } catch (error) { + return { + result: null, + attempts: [ + { + providerId: "anthropic", + providerName: "Anthropic", + outcome: "failed", + reason: error 
instanceof Error ? error.message : String(error), + }, + ], + }; + } +} diff --git a/apps/desktop/src/lib/electron-app/factories/app/setup.ts b/apps/desktop/src/lib/electron-app/factories/app/setup.ts index 05a1138209e..67385353019 100644 --- a/apps/desktop/src/lib/electron-app/factories/app/setup.ts +++ b/apps/desktop/src/lib/electron-app/factories/app/setup.ts @@ -1,5 +1,4 @@ import { app, BrowserWindow, shell } from "electron"; -import { env } from "main/env.main"; import { loadReactDevToolsExtension } from "main/lib/extensions"; import { PLATFORM } from "shared/constants"; import { makeAppId } from "shared/utils"; @@ -69,17 +68,18 @@ if (PLATFORM.IS_MAC) { PLATFORM.IS_WINDOWS && app.setAppUserModelId( - env.NODE_ENV === "development" ? process.execPath : makeAppId(), + process.env.NODE_ENV === "development" ? process.execPath : makeAppId(), ); app.commandLine.appendSwitch("force-color-profile", "srgb"); -// Only expose CDP in development when a port is explicitly configured. -const cdpPort = - env.NODE_ENV === "development" - ? process.env.DESKTOP_AUTOMATION_PORT - : undefined; -if (cdpPort) { - app.commandLine.appendSwitch("remote-debugging-port", cdpPort); - app.commandLine.appendSwitch("remote-allow-origins", "*"); -} +// Always expose CDP on a loopback port so the browser-mcp bridge can +// hand external browser automation MCPs (chrome-devtools-mcp, +// browser-use, playwright-mcp, …) a filtered per-pane CDP endpoint. +// DESKTOP_AUTOMATION_PORT overrides the random-port default for the +// existing desktop-automation integration. `*` here is safe because +// the actual gate is at the browser-mcp-bridge proxy level +// (token-authenticated, loopback-only). +const cdpPort = process.env.DESKTOP_AUTOMATION_PORT ?? 
"0"; +app.commandLine.appendSwitch("remote-debugging-port", cdpPort); +app.commandLine.appendSwitch("remote-allow-origins", "*"); diff --git a/apps/desktop/src/lib/electron/request-media-access.ts b/apps/desktop/src/lib/electron/request-media-access.ts new file mode 100644 index 00000000000..6588c3a821b --- /dev/null +++ b/apps/desktop/src/lib/electron/request-media-access.ts @@ -0,0 +1,54 @@ +import { shell, systemPreferences } from "electron"; + +// Only microphone / camera are meaningful here — the rest of +// SitePermissionKind (geolocation / notifications / clipboard-read) +// does not have a native macOS media-access equivalent. +type MediaKind = "microphone" | "camera"; + +const MEDIA_ACCESS_SETTINGS_URLS: Record = { + microphone: + "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone", + camera: + "x-apple.systempreferences:com.apple.preference.security?Privacy_Camera", +}; + +interface RequestMediaAccessResult { + granted: boolean; + openedSystemSettings: boolean; +} + +export async function requestMediaAccess( + kind: MediaKind, +): Promise { + if (process.platform !== "darwin") { + return { + granted: true, + openedSystemSettings: false, + }; + } + + try { + if (systemPreferences.getMediaAccessStatus(kind) === "granted") { + return { + granted: true, + openedSystemSettings: false, + }; + } + + const granted = await systemPreferences.askForMediaAccess(kind); + if (granted) { + return { + granted: true, + openedSystemSettings: false, + }; + } + } catch { + // Fall through to opening System Settings. 
+ } + + await shell.openExternal(MEDIA_ACCESS_SETTINGS_URLS[kind]); + return { + granted: false, + openedSystemSettings: true, + }; +} diff --git a/apps/desktop/src/lib/errors.ts b/apps/desktop/src/lib/errors.ts new file mode 100644 index 00000000000..3636fba9c5a --- /dev/null +++ b/apps/desktop/src/lib/errors.ts @@ -0,0 +1,6 @@ +export class SessionDisposedError extends Error { + constructor() { + super("TypeScript session disposed"); + this.name = "SessionDisposedError"; + } +} diff --git a/apps/desktop/src/lib/trpc/index.ts b/apps/desktop/src/lib/trpc/index.ts index 5bb8b4686f3..2ebbe01081f 100644 --- a/apps/desktop/src/lib/trpc/index.ts +++ b/apps/desktop/src/lib/trpc/index.ts @@ -1,8 +1,10 @@ import { createTRPCReact } from "@trpc/react-query"; import { initTRPC } from "@trpc/server"; import superjson from "superjson"; +import { SessionDisposedError } from "../errors"; import type { AppRouter } from "./routers"; import { NotGitRepoError } from "./routers/workspaces/utils/git"; +import { WorktreePathMissingError } from "./routers/workspaces/utils/git-client"; /** * Core tRPC initialization @@ -29,8 +31,54 @@ const sentryMiddleware = t.middleware(async ({ next, path, type }) => { // Get the original error if it's wrapped in a TRPCError const originalError = error.cause instanceof Error ? error.cause : error; - // Don't report expected user conditions to Sentry - if (originalError instanceof NotGitRepoError) { + // Don't report expected user conditions to Sentry. + // These are races/lifecycle events, not bugs — reporting them floods + // the dashboard (ELECTRON-26/1Z hit 5000+ events in one session). + // The `.name` check catches errors re-thrown from worker threads + // (WorkerTaskError preserves the original name but not the class). + const errorName = + originalError instanceof Error ? 
originalError.name : null; + if ( + originalError instanceof NotGitRepoError || + originalError instanceof WorktreePathMissingError || + originalError instanceof SessionDisposedError || + errorName === "NotGitRepoError" || + errorName === "WorktreePathMissingError" || + errorName === "SessionDisposedError" + ) { + return result; + } + + // User-environment errors bubbled out through tRPC. These tell us + // nothing actionable about the app itself — they're about the user's + // disk, their gh CLI auth, or the remote they were pushing to. + const message = + originalError instanceof Error ? originalError.message : ""; + // NOTE: これらは「ユーザー環境起因」のノイズだけを握りつぶす意図。 + // 広いパターン (例: "Operation timed out" 単独、"Command failed: gh" の + // 全サブコマンド) を入れると、本来修正すべきアプリ側の呼び出しバグや + // 本家リポジトリ操作の不具合まで消してしまうので、外部ネットワーク/ + // 外部プロセスに帰着できる文脈 (ssh, remote repo push, gh auth) に + // 限定した文字列を重ねて使う。 + const USER_ENV_NOISE_PATTERNS = [ + // Disk full (ELECTRON-25) + "ENOSPC: no space left on device", + // gh CLI auth/network failures — auth / api / clone / pr view など + // 外部 GitHub への通信系だけに絞る (ELECTRON-R/18) + "Command failed: gh auth", + "Command failed: gh api", + "Command failed: gh repo clone", + "Command failed: gh pr view", + "Command failed: gh pr list", + // Git push rejections and remote connectivity (ELECTRON-P/16/21/22) + "the remote end hung up unexpectedly", + "ssh_dispatch_run_fatal", + "! 
[rejected]", + "failed to push some refs", + ]; + if ( + USER_ENV_NOISE_PATTERNS.some((pattern) => message.includes(pattern)) + ) { return result; } diff --git a/apps/desktop/src/lib/trpc/routers/agent-command-execution/index.ts b/apps/desktop/src/lib/trpc/routers/agent-command-execution/index.ts new file mode 100644 index 00000000000..d4b1d1271a8 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/agent-command-execution/index.ts @@ -0,0 +1,28 @@ +import { getAgentCommandExecutionCoordinator } from "main/lib/agent-command-execution-coordinator"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +const claimInput = z.object({ + commandId: z.string(), + timeoutAt: z.union([z.date(), z.string(), z.null()]).optional(), +}); + +const releaseInput = z.object({ + commandId: z.string(), +}); + +export const createAgentCommandExecutionRouter = () => { + return router({ + claim: publicProcedure.input(claimInput).mutation(({ input }) => { + const granted = getAgentCommandExecutionCoordinator().claim( + input.commandId, + input.timeoutAt, + ); + return { granted }; + }), + release: publicProcedure.input(releaseInput).mutation(({ input }) => { + getAgentCommandExecutionCoordinator().release(input.commandId); + return { released: true }; + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/aivis/index.ts b/apps/desktop/src/lib/trpc/routers/aivis/index.ts new file mode 100644 index 00000000000..4c513caf7d4 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/aivis/index.ts @@ -0,0 +1,415 @@ +import { randomUUID } from "node:crypto"; +import { TRPCError } from "@trpc/server"; +import { + AivisApiError, + AivisApiKeyMissingError, + aivisFetch, + aivisJson, +} from "main/lib/aivis/client"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +const WORD_TYPES = [ + "PROPER_NOUN", + "COMMON_NOUN", + "VERB", + "ADJECTIVE", + "SUFFIX", +] as const; + +function wrapApiError(err: unknown): TRPCError { + if (err instanceof 
AivisApiKeyMissingError) { + return new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Aivis API key is not configured", + }); + } + if (err instanceof AivisApiError) { + return new TRPCError({ + code: err.status === 401 ? "UNAUTHORIZED" : "BAD_REQUEST", + message: err.message, + }); + } + return new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: err instanceof Error ? err.message : String(err), + }); +} + +const wordSchema = z.object({ + uuid: z.string().uuid(), + surface: z.array(z.string().min(1)).min(1), + pronunciation: z.array(z.string().min(1)).min(1), + accent_type: z.array(z.number().int().min(0)), + word_type: z.enum(WORD_TYPES).default("PROPER_NOUN"), + priority: z.number().int().min(0).max(10).default(5), +}); + +interface DictionaryListItem { + uuid: string; + name: string; + description: string; + word_count: number; + created_at: string; + updated_at: string; +} + +interface DictionaryDetail { + name: string; + description: string; + word_properties: Array<{ + uuid: string; + surface: string[]; + normalized_surface?: string[] | null; + pronunciation: string[]; + accent_type: number[]; + word_type: (typeof WORD_TYPES)[number]; + priority: number; + }>; + created_at: string; + updated_at: string; +} + +interface UsageSummary { + api_key_id: string; + api_key_name: string; + summary_date: string; + summary_hour: number; + request_count: number; + character_count: number; + credit_consumed: number; +} + +interface UserMeResponse { + handle?: string; + name?: string; + email?: string; + credit_balance?: number; + // Additional fields are ignored; we only surface balance + identity. + [key: string]: unknown; +} + +export const createAivisRouter = () => { + return router({ + /** Validate the key without persisting it; returns basic user info. 
*/ + validateKey: publicProcedure + .input(z.object({ apiKey: z.string().min(1).optional() })) + .mutation(async ({ input }) => { + try { + const me = await aivisJson("/v1/users/me", { + apiKey: input.apiKey ?? undefined, + }); + return { + ok: true as const, + handle: typeof me.handle === "string" ? me.handle : null, + name: typeof me.name === "string" ? me.name : null, + creditBalance: + typeof me.credit_balance === "number" ? me.credit_balance : null, + }; + } catch (err) { + if (err instanceof AivisApiKeyMissingError) { + return { ok: false as const, reason: "missing" as const }; + } + if (err instanceof AivisApiError) { + return { + ok: false as const, + reason: err.status === 401 ? "unauthorized" : "api", + message: err.message, + } as const; + } + throw wrapApiError(err); + } + }), + + dictionary: router({ + list: publicProcedure.query(async () => { + try { + const json = await aivisJson<{ + user_dictionaries: DictionaryListItem[]; + }>("/v1/user-dictionaries"); + return json.user_dictionaries; + } catch (err) { + throw wrapApiError(err); + } + }), + + get: publicProcedure + .input(z.object({ uuid: z.string().uuid() })) + .query(async ({ input }) => { + try { + return await aivisJson( + `/v1/user-dictionaries/${input.uuid}`, + ); + } catch (err) { + throw wrapApiError(err); + } + }), + + create: publicProcedure + .input( + z.object({ + name: z.string().min(1).max(100), + description: z.string().max(500).default(""), + }), + ) + .mutation(async ({ input }) => { + const uuid = randomUUID(); + try { + await aivisFetch(`/v1/user-dictionaries/${uuid}`, { + method: "PUT", + json: { + name: input.name, + description: input.description, + word_properties: [], + }, + }); + return { uuid }; + } catch (err) { + throw wrapApiError(err); + } + }), + + update: publicProcedure + .input( + z.object({ + uuid: z.string().uuid(), + name: z.string().min(1).max(100), + description: z.string().max(500).default(""), + words: z.array(wordSchema), + }), + ) + .mutation(async ({ 
input }) => { + try { + await aivisFetch(`/v1/user-dictionaries/${input.uuid}`, { + method: "PUT", + json: { + name: input.name, + description: input.description, + word_properties: input.words, + }, + }); + return { success: true }; + } catch (err) { + throw wrapApiError(err); + } + }), + + delete: publicProcedure + .input(z.object({ uuid: z.string().uuid() })) + .mutation(async ({ input }) => { + try { + await aivisFetch(`/v1/user-dictionaries/${input.uuid}`, { + method: "DELETE", + }); + return { success: true }; + } catch (err) { + throw wrapApiError(err); + } + }), + + export: publicProcedure + .input(z.object({ uuid: z.string().uuid() })) + .query(async ({ input }) => { + try { + return await aivisJson>( + `/v1/user-dictionaries/${input.uuid}/export`, + ); + } catch (err) { + throw wrapApiError(err); + } + }), + + import: publicProcedure + .input( + z.object({ + uuid: z.string().uuid(), + data: z.record(z.string(), z.unknown()), + override: z.boolean().default(false), + }), + ) + .mutation(async ({ input }) => { + try { + await aivisFetch(`/v1/user-dictionaries/${input.uuid}/import`, { + method: "POST", + query: { override: input.override }, + json: input.data, + }); + return { success: true }; + } catch (err) { + throw wrapApiError(err); + } + }), + }), + + usage: router({ + daily: publicProcedure + .input( + z.object({ + startDate: z.string().regex(/^\d{4}-\d{2}-\d{2}$/), + endDate: z.string().regex(/^\d{4}-\d{2}-\d{2}$/), + }), + ) + .query(async ({ input }) => { + try { + const json = await aivisJson<{ summaries: UsageSummary[] }>( + "/v1/payment/usage-summaries", + { + query: { + start_date: input.startDate, + end_date: input.endDate, + }, + }, + ); + + const byDate = new Map< + string, + { + date: string; + requestCount: number; + characterCount: number; + creditConsumed: number; + byApiKey: Record< + string, + { + name: string; + requestCount: number; + characterCount: number; + creditConsumed: number; + } + >; + } + >(); + + for (const s of 
json.summaries) { + const entry = byDate.get(s.summary_date) ?? { + date: s.summary_date, + requestCount: 0, + characterCount: 0, + creditConsumed: 0, + byApiKey: {}, + }; + entry.requestCount += s.request_count; + entry.characterCount += s.character_count; + entry.creditConsumed += s.credit_consumed; + + const bucket = entry.byApiKey[s.api_key_id] ?? { + name: s.api_key_name, + requestCount: 0, + characterCount: 0, + creditConsumed: 0, + }; + bucket.requestCount += s.request_count; + bucket.characterCount += s.character_count; + bucket.creditConsumed += s.credit_consumed; + entry.byApiKey[s.api_key_id] = bucket; + + byDate.set(s.summary_date, entry); + } + + const days = [...byDate.values()].sort((a, b) => + a.date.localeCompare(b.date), + ); + const total = days.reduce( + (acc, d) => ({ + requestCount: acc.requestCount + d.requestCount, + characterCount: acc.characterCount + d.characterCount, + creditConsumed: acc.creditConsumed + d.creditConsumed, + }), + { requestCount: 0, characterCount: 0, creditConsumed: 0 }, + ); + + return { days, total }; + } catch (err) { + throw wrapApiError(err); + } + }), + + me: publicProcedure.query(async () => { + try { + const me = await aivisJson("/v1/users/me"); + return { + handle: typeof me.handle === "string" ? me.handle : null, + name: typeof me.name === "string" ? me.name : null, + creditBalance: + typeof me.credit_balance === "number" ? 
me.credit_balance : null, + }; + } catch (err) { + throw wrapApiError(err); + } + }), + }), + + model: router({ + get: publicProcedure + .input(z.object({ uuid: z.string().uuid() })) + .query(async ({ input }) => { + try { + const m = await aivisJson( + `/v1/aivm-models/${input.uuid}`, + { optionalAuth: true }, + ); + return summarizeModel(m); + } catch (err) { + throw wrapApiError(err); + } + }), + + searchByName: publicProcedure + .input(z.object({ name: z.string().min(1).max(100) })) + .query(async ({ input }) => { + try { + const json = await aivisJson<{ + aivm_models: AivmModelResponse[]; + }>("/v1/aivm-models/search", { + optionalAuth: true, + query: { keyword: input.name, limit: 5 }, + }); + const models = json.aivm_models ?? []; + const exact = models.find((m) => m.name === input.name); + const match = exact ?? models[0]; + return match ? summarizeModel(match) : null; + } catch (err) { + throw wrapApiError(err); + } + }), + }), + }); +}; + +interface AivmStyle { + voice_samples?: Array<{ audio_url?: string | null }>; +} +interface AivmSpeaker { + aivm_speaker_uuid: string; + name: string; + icon_url?: string | null; + styles?: AivmStyle[]; +} +interface AivmUser { + handle?: string; + name?: string; + icon_url?: string | null; +} +interface AivmModelResponse { + aivm_model_uuid: string; + name: string; + description?: string; + user?: AivmUser; + speakers?: AivmSpeaker[]; +} + +function summarizeModel(m: AivmModelResponse) { + const speakerIcon = m.speakers?.[0]?.icon_url ?? null; + const userIcon = m.user?.icon_url ?? null; + const sampleUrl = + m.speakers?.[0]?.styles?.[0]?.voice_samples?.[0]?.audio_url ?? null; + return { + uuid: m.aivm_model_uuid, + name: m.name, + description: m.description ?? "", + iconUrl: speakerIcon ?? userIcon, + sampleUrl, + authorName: m.user?.name ?? null, + authorHandle: m.user?.handle ?? 
null, + }; +} diff --git a/apps/desktop/src/lib/trpc/routers/browser-automation/index.ts b/apps/desktop/src/lib/trpc/routers/browser-automation/index.ts new file mode 100644 index 00000000000..6ea584a03ee --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/browser-automation/index.ts @@ -0,0 +1,767 @@ +import { EventEmitter } from "node:events"; +import { existsSync, readFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { join, resolve as resolvePath } from "node:path"; +import { + browserAutomationBindings, + projects, + type SelectBrowserAutomationBinding, + workspaces, + worktrees, +} from "@superset/local-db"; +import { observable } from "@trpc/server/observable"; +import { and, eq, ne } from "drizzle-orm"; +import { app } from "electron"; +import { localDb } from "main/lib/local-db"; +import { + getProcessCommand, + getProcessName, + getProcessTree, +} from "main/lib/terminal/port-scanner"; +import { getTerminalHostClient } from "main/lib/terminal-host/client"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +/** + * Browser automation bindings router. + * + * Bindings persist in local-db so they survive app restarts: the terminal + * daemon re-attaches terminal panes and TODO-Agent sessions keep running, + * so losing the binding would force a re-connect on every launch. + * + * Also exposes MCP-readiness detection by reading the user's agent config + * files (Claude Code / Codex) for the `superset-browser` entry. + */ + +export type BrowserAutomationBinding = SelectBrowserAutomationBinding; + +class BindingStore { + private readonly emitter = new EventEmitter(); + + constructor() { + // One subscription per renderer hook instance; a workspace with many + // open panes can blow past Node's 10-listener default otherwise. 
+ this.emitter.setMaxListeners(0); + } + + list(): BrowserAutomationBinding[] { + return localDb.select().from(browserAutomationBindings).all(); + } + + get(paneId: string): BrowserAutomationBinding | null { + return ( + localDb + .select() + .from(browserAutomationBindings) + .where(eq(browserAutomationBindings.paneId, paneId)) + .get() ?? null + ); + } + + getBySessionId(sessionId: string): BrowserAutomationBinding | null { + return ( + localDb + .select() + .from(browserAutomationBindings) + .where(eq(browserAutomationBindings.sessionId, sessionId)) + .get() ?? null + ); + } + + set( + paneId: string, + sessionId: string, + sessionKind: string, + ): { previousPaneId: string | null } { + // Remove any existing binding that points at the same session on a + // different pane so we enforce 1 session ↔ 1 pane. + const existingOtherPane = localDb + .select() + .from(browserAutomationBindings) + .where( + and( + eq(browserAutomationBindings.sessionId, sessionId), + ne(browserAutomationBindings.paneId, paneId), + ), + ) + .get(); + const previousPaneId = existingOtherPane?.paneId ?? 
null; + if (previousPaneId) { + localDb + .delete(browserAutomationBindings) + .where(eq(browserAutomationBindings.paneId, previousPaneId)) + .run(); + } + const row = { + paneId, + sessionId, + sessionKind, + connectedAt: Date.now(), + }; + // Drizzle SQLite upsert via onConflictDoUpdate + localDb + .insert(browserAutomationBindings) + .values(row) + .onConflictDoUpdate({ + target: browserAutomationBindings.paneId, + set: { + sessionId: row.sessionId, + sessionKind: row.sessionKind, + connectedAt: row.connectedAt, + }, + }) + .run(); + this.emitChange(); + return { previousPaneId }; + } + + remove(paneId: string): boolean { + const result = localDb + .delete(browserAutomationBindings) + .where(eq(browserAutomationBindings.paneId, paneId)) + .run(); + if (result.changes > 0) { + this.emitChange(); + return true; + } + return false; + } + + private emitChange() { + this.emitter.emit("change", this.list()); + } + + onChange(cb: (bindings: BrowserAutomationBinding[]) => void): () => void { + this.emitter.on("change", cb); + return () => { + this.emitter.off("change", cb); + }; + } +} + +export const bindingStore = new BindingStore(); + +const SERVER_NAME = "superset-browser"; + +function isEnabledMcpEntry( + value: unknown, + expected?: { command: string; args: string[] }, +): boolean { + if (value == null || typeof value !== "object") return false; + const entry = value as Record; + if (entry.disabled === true) return false; + const hasShape = + typeof entry.command === "string" || + typeof entry.url === "string" || + Array.isArray(entry.args); + if (!hasShape) return false; + // When we know the canonical command the app wants to install (the + // bundled binary path), require the registered entry to match. That + // way a legacy `desktop-mcp` / `superset-browser-mcp` registration + // isn't reported as ready and the UI prompts the user to re-install + // against the current bundled binary. 
Absence of expected means the + // shape check alone is enough (for callers that do not care yet). + if (!expected) return true; + if (entry.command !== expected.command) return false; + const rawArgs = Array.isArray(entry.args) + ? (entry.args as unknown[]).map(String) + : []; + if (rawArgs.length !== expected.args.length) return false; + for (let i = 0; i < rawArgs.length; i++) { + if (rawArgs[i] !== expected.args[i]) return false; + } + return true; +} + +/** + * Claude Code writes MCP server definitions into several possible files: + * - `~/.claude.json` (user scope, written by `claude mcp add`) + * - `~/.claude/settings.json` (legacy / hooks-oriented) + * - `/.mcp.json` (project scope) + * We inspect all of them and accept the server if any file contains an + * enabled entry. Each file is parsed as JSON and we look under + * `mcpServers[name]`. + */ +function mcpServersInObject(obj: unknown): Record | null { + if (!obj || typeof obj !== "object") return null; + const candidate = (obj as Record).mcpServers; + if (!candidate || typeof candidate !== "object") return null; + return candidate as Record; +} + +/** + * Claude `~/.claude.json` holds MCP entries in two places: + * - top-level `mcpServers[name]` (user scope) + * - `projects[].mcpServers[name]` (local scope, default for + * `claude mcp add`) + * We accept either. Other config files (`.claude/settings.json`, + * `/.mcp.json`) only use the top-level shape. 
+ */ +function detectClaudeMcpInFile( + filePath: string, + opts?: { + workspacePaths?: readonly string[]; + expected?: { command: string; args: string[] }; + }, +): boolean { + try { + const contents = readFileSync(filePath, "utf8"); + const parsed = JSON.parse(contents) as unknown; + const topLevel = mcpServersInObject(parsed); + if (topLevel && isEnabledMcpEntry(topLevel[SERVER_NAME], opts?.expected)) + return true; + const projects = (parsed as Record | null)?.projects; + if (projects && typeof projects === "object" && opts?.workspacePaths) { + for (const wsPath of opts.workspacePaths) { + const project = (projects as Record)[wsPath]; + const entries = mcpServersInObject(project); + if (entries && isEnabledMcpEntry(entries[SERVER_NAME], opts?.expected)) + return true; + } + } + return false; + } catch { + return false; + } +} + +function detectClaudeMcp( + paths: readonly string[], + opts?: { + workspacePaths?: readonly string[]; + expected?: { command: string; args: string[] }; + }, +): boolean { + return paths.some((p) => detectClaudeMcpInFile(p, opts)); +} + +/** + * Resolve the `.mcp.json` path for each workspace keyed by workspaceId, + * so per-project MCP definitions (output of `claude mcp add -s project`) + * can be considered per-session without letting one configured project + * make sessions from other projects look ready. + */ +interface WorkspacePathInfo { + base: string; + mcpJsonPath: string; +} + +function collectWorkspacePathsByWorkspaceId(): Record< + string, + WorkspacePathInfo +> { + try { + const rows = localDb + .select({ + workspaceId: workspaces.id, + worktreePath: worktrees.path, + mainRepoPath: projects.mainRepoPath, + }) + .from(workspaces) + .leftJoin(projects, eq(projects.id, workspaces.projectId)) + .leftJoin(worktrees, eq(worktrees.id, workspaces.worktreeId)) + .all(); + const out: Record = {}; + for (const row of rows) { + const base = row.worktreePath ?? row.mainRepoPath ?? 
null; + if (row.workspaceId && base) { + out[row.workspaceId] = { + base, + mcpJsonPath: join(base, ".mcp.json"), + }; + } + } + return out; + } catch { + return {}; + } +} + +/** + * Codex: ~/.codex/config.toml uses `[mcp_servers.]` table sections. + * We avoid pulling in a TOML parser just for this one check — instead we + * isolate the `[mcp_servers.superset-browser]` section and verify it has + * at least one usable field (`command`, `url`, `args`) and is not marked + * `disabled = true`. Comment lines (starting with `#`) are ignored. + */ +function unescapeTomlBasicString(raw: string): string { + // Minimal TOML basic-string unescape: handles the standard sequences + // users actually write for Windows paths (backslashes) and shell + // invocations. Not a full TOML parser but enough for command / args + // values that come out of `codex mcp add`. + return raw.replace( + /\\(["\\bfnrt]|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})/g, + (_, esc) => { + switch (esc) { + case "\\": + return "\\"; + case '"': + return '"'; + case "b": + return "\b"; + case "f": + return "\f"; + case "n": + return "\n"; + case "r": + return "\r"; + case "t": + return "\t"; + default: { + const hex = esc.slice(1); + const code = Number.parseInt(hex, 16); + return Number.isFinite(code) ? String.fromCodePoint(code) : ""; + } + } + }, + ); +} + +function extractTomlStrings(line: string | undefined): string[] { + if (!line) return []; + const out: string[] = []; + // Match basic strings "…" (with escapes) and literal strings '…' + // (no escape processing). Both are valid TOML. + const re = /"((?:\\.|[^"\\])*)"|'([^']*)'/g; + for (let m = re.exec(line); m !== null; m = re.exec(line)) { + if (m[1] !== undefined) out.push(unescapeTomlBasicString(m[1])); + else if (m[2] !== undefined) out.push(m[2]); + } + return out; +} + +function parseFirstTomlString(line: string | undefined): string { + return extractTomlStrings(line)[0] ?? 
""; +} + +function parseAllTomlStrings(line: string | undefined): string[] { + return extractTomlStrings(line); +} + +function detectCodexMcp( + filePath: string, + expected?: { command: string; args: string[] }, +): boolean { + try { + const contents = readFileSync(filePath, "utf8"); + // TOML accepts several equivalent header forms for the same table. + const q = `["']`; + const name = SERVER_NAME.replace(/[-/\\^$*+?.()|[\]{}]/g, "\\$&"); + const sectionRe = new RegExp( + String.raw`(^|\n)\[\s*(?:mcp_servers\.(?:${name}|${q}${name}${q})|${q}mcp_servers${q}\.${name})\s*\]\s*\n([\s\S]*?)(?=\n\[|$)`, + ); + const match = contents.match(sectionRe); + if (!match) return false; + const body = match[2] + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0 && !line.startsWith("#")); + if (body.some((line) => /^disabled\s*=\s*true\b/.test(line))) return false; + const hasShape = body.some((line) => /^(command|url|args)\s*=/.test(line)); + if (!hasShape) return false; + if (!expected) return true; + const commandLine = body.find((line) => /^command\s*=/.test(line)); + const argsLine = body.find((line) => /^args\s*=/.test(line)); + const command = parseFirstTomlString(commandLine); + const args = parseAllTomlStrings(argsLine); + if (command !== expected.command) return false; + if (args.length !== expected.args.length) return false; + for (let i = 0; i < args.length; i++) { + if (args[i] !== expected.args[i]) return false; + } + return true; + } catch { + return false; + } +} + +const CLAUDE_USER_JSON_PATH = join(homedir(), ".claude.json"); +const CLAUDE_SETTINGS_JSON_PATH = join(homedir(), ".claude", "settings.json"); +const CLAUDE_CONFIG_PATHS = [CLAUDE_USER_JSON_PATH, CLAUDE_SETTINGS_JSON_PATH]; +const CODEX_CONFIG_PATH = join(homedir(), ".codex", "config.toml"); + +export interface TerminalAgentSession { + paneId: string; + workspaceId: string; + pid: number; + provider: "Claude" | "Codex"; + command: string; + lastAttachedAt?: string; +} + 
+/** + * Walk every live terminal session's PTY process tree and return the ones + * that currently have a `claude` or `codex` child process. Used so the + * Browser Automation UI can treat "the claude I started in this terminal + * tab" as an LLM session that is connectable to a browser pane. + */ +async function detectTerminalAgentSessions(): Promise { + let sessions: Awaited< + ReturnType["listSessions"]> + >["sessions"]; + try { + const client = getTerminalHostClient(); + const response = await client.listSessions(); + sessions = response.sessions; + } catch (error) { + // Terminal-host daemon is intermittently unavailable (restart races, + // IPC errors). Degrade gracefully so liveness data for non-terminal + // bindings is still returned instead of rejecting the whole query. + console.warn( + "[browser-automation] terminal listSessions failed, skipping terminal probe:", + error, + ); + return []; + } + const out: TerminalAgentSession[] = []; + await Promise.all( + sessions.map(async (s) => { + if (!s.isAlive || typeof s.pid !== "number") return; + const pids = await getProcessTree(s.pid); + // Skip the shell itself (root pid). For each child we read BOTH + // comm (short name) AND args (full argv). Many claude / codex + // installs appear as comm=node with args=node /usr/local/.../claude, + // so a comm-only match misses them. 
+ const probes = await Promise.all( + pids + .filter((p) => p !== s.pid) + .map(async (p) => { + const [name, command] = await Promise.all([ + getProcessName(p), + getProcessCommand(p), + ]); + return { pid: p, name, command }; + }), + ); + const match = probes.find((p) => classifyAgent(p.name, p.command)); + if (!match) return; + const provider = classifyAgent(match.name, match.command); + if (!provider) return; + out.push({ + paneId: s.paneId, + workspaceId: s.workspaceId, + pid: match.pid, + provider, + command: match.name, + lastAttachedAt: s.lastAttachedAt, + }); + }), + ); + return out; +} + +/** + * Is the process a `claude` or `codex` CLI? Checks both the short + * process name and the full argv. The CLIs are commonly installed as + * thin wrappers that `exec node /path/to/bin/claude ...`, which makes + * the short name "node" — argv catches that. + */ +function classifyAgent( + name: string, + command: string, +): "Claude" | "Codex" | null { + const lname = name.toLowerCase(); + if (lname === "codex") return "Codex"; + if (lname === "claude") return "Claude"; + // Fall back to argv matching. Only accept tokens whose basename is + // exactly claude / codex (so a random node script that has "claude" + // as a substring does not match). + const tokens = command.split(/\s+/).filter(Boolean); + for (const token of tokens) { + const base = token.replace(/\\/g, "/").split("/").pop() ?? token; + if (base === "codex" || base === "codex.js") return "Codex"; + if (base === "claude" || base === "claude.js") return "Claude"; + } + return null; +} + +/** + * Resolve the `superset-browser-mcp` bin that a Claude / Codex session + * should spawn. In dev we return `bun run /packages/superset-browser-mcp/src/bin.ts` + * so the snippet shown in the Connect modal is copy-pasteable without + * requiring a global install. 
In packaged production builds the source + * tree is not available; we fall back to the bare name so a future + * published npm package still produces a usable snippet. + */ +function resolveSupersetBrowserMcpCommand(): { + command: string; + args: string[]; + available: boolean; +} { + if (app.isPackaged) { + // Standalone binary shipped alongside the app (see electron-builder + // extraResources `to: "resources/superset-browser-mcp"`). On macOS + // process.resourcesPath is /Contents/Resources, so the final + // layout is /Contents/Resources/resources/superset-browser-mcp/. + const binName = + process.platform === "win32" + ? "superset-browser-mcp.exe" + : "superset-browser-mcp"; + const binPath = join( + process.resourcesPath, + "resources", + "superset-browser-mcp", + binName, + ); + if (existsSync(binPath)) { + return { command: binPath, args: [], available: true }; + } + return { + command: binPath, + args: [], + available: false, + }; + } + const repoRoot = resolvePath(app.getAppPath(), "../.."); + const binPath = join(repoRoot, "packages/superset-browser-mcp/src/bin.ts"); + if (existsSync(binPath)) { + return { command: "bun", args: ["run", binPath], available: true }; + } + return { + command: "bun", + args: ["run", binPath], + available: false, + }; +} + +export const createBrowserAutomationRouter = () => { + return router({ + getMcpStatus: publicProcedure.query(() => { + // Claude readiness is resolved in two dimensions so a single + // configured project never makes sessions from other projects + // look ready: + // - `claudeHomeReady`: only the top-level (user-scope) + // mcpServers in $HOME files. + // - `claudeReadyByWorkspaceId`: for each workspace, check + // * `~/.claude.json` under `projects[]` + // (local scope, where `claude mcp add` lands by default) + // * `/.mcp.json` (project scope) + // Only accept entries that point at *this* install's bundled + // binary. 
An older desktop-mcp / legacy superset-browser-mcp + // registration from a prior build would otherwise be reported + // as ready and the UI would enable Connect against a command + // that does not exist. + const expected = resolveSupersetBrowserMcpCommand(); + const claudeHomeReady = detectClaudeMcp(CLAUDE_CONFIG_PATHS, { + expected, + }); + const wsInfo = collectWorkspacePathsByWorkspaceId(); + const claudeReadyByWorkspaceId: Record = {}; + for (const [workspaceId, info] of Object.entries(wsInfo)) { + const localScope = detectClaudeMcpInFile(CLAUDE_USER_JSON_PATH, { + workspacePaths: [info.base], + expected, + }); + const projectScope = detectClaudeMcpInFile(info.mcpJsonPath, { + expected, + }); + claudeReadyByWorkspaceId[workspaceId] = localScope || projectScope; + } + const codexReady = detectCodexMcp(CODEX_CONFIG_PATH, expected); + return { + claudeHomeReady, + claudeReadyByWorkspaceId, + codexReady, + claudeConfigPath: CLAUDE_USER_JSON_PATH, + codexConfigPath: CODEX_CONFIG_PATH, + serverCommand: expected, + }; + }), + + listTerminalAgentSessions: publicProcedure.query(() => + detectTerminalAgentSessions(), + ), + + listBindings: publicProcedure.query(() => bindingStore.list()), + + /** + * Cheap resolver used by the per-pane `ConnectButton` to decide + * whether a stored binding still maps to a live worker. Runs once + * per window (React Query dedupes the call) so many Connect buttons + * cost one main-process query total. Terminal bindings are resolved + * by scanning the PTY process tree; TODO-Agent bindings by matching + * against the live status whitelist. + */ + listBindingLiveness: publicProcedure.query(async () => { + // Sweep out any persisted todo-agent bindings — those were + // allowed by an earlier build but the MCP bridge cannot resolve + // them yet. Leaving them would show up as "Connected" on the + // ConnectButton even though no session is reachable. After the + // sweep, re-read. 
+ const stored = bindingStore.list(); + for (const b of stored) { + if (b.sessionKind === "todo-agent") { + bindingStore.remove(b.paneId); + } + } + const bindings = bindingStore.list(); + if (bindings.length === 0) + return [] as Array<{ + paneId: string; + sessionId: string; + sessionKind: string; + live: boolean; + }>; + const hasTerminalBinding = bindings.some( + (b) => b.sessionKind === "terminal", + ); + // Only probe the terminal daemon when at least one binding actually + // points at a terminal — otherwise every Connect button's 15s poll + // would wake the terminal-host and walk every PTY's process tree. + const liveTerminalIds = hasTerminalBinding + ? new Set( + (await detectTerminalAgentSessions()).map( + (t) => `terminal:${t.paneId}`, + ), + ) + : new Set(); + return bindings.map((b) => { + const live = + b.sessionKind === "terminal" + ? liveTerminalIds.has(b.sessionId) + : false; + return { + paneId: b.paneId, + sessionId: b.sessionId, + sessionKind: b.sessionKind, + live, + }; + }); + }), + + getBindingByPane: publicProcedure + .input(z.object({ paneId: z.string() })) + .query(({ input }) => bindingStore.get(input.paneId)), + + getBindingBySession: publicProcedure + .input(z.object({ sessionId: z.string() })) + .query(({ input }) => bindingStore.getBySessionId(input.sessionId)), + + setBinding: publicProcedure + .input( + z.object({ + paneId: z.string(), + sessionId: z.string(), + sessionKind: z.enum(["todo-agent", "terminal"]).default("terminal"), + }), + ) + .mutation(({ input }) => { + // TODO-Agent workers live in the todo-daemon process; the + // browser-mcp bridge in main can't resolve their PIDs yet. + // Reject the binding instead of letting users create one + // whose MCP tool calls would always error. + if (input.sessionKind === "todo-agent") { + throw new Error( + "TODO-Agent browser automation bindings are not supported yet. 
Run claude / codex in a Superset terminal pane instead.", + ); + } + return bindingStore.set( + input.paneId, + input.sessionId, + input.sessionKind, + ); + }), + + removeBinding: publicProcedure + .input(z.object({ paneId: z.string() })) + .mutation(({ input }) => ({ + removed: bindingStore.remove(input.paneId), + })), + + getMcpInstallState: publicProcedure.query(async () => { + const { getInstallState } = await import( + "main/lib/browser-mcp-bridge/mcp-installer" + ); + return getInstallState(resolveSupersetBrowserMcpCommand()); + }), + + installMcp: publicProcedure + .input( + z.object({ + targets: z.array(z.enum(["claude", "codex"])).min(1), + }), + ) + .mutation(async ({ input }) => { + const server = resolveSupersetBrowserMcpCommand(); + if (!server.available) { + throw new Error( + "The bundled superset-browser-mcp binary is not available in this build.", + ); + } + const { installMcp } = await import( + "main/lib/browser-mcp-bridge/mcp-installer" + ); + return installMcp(input.targets, server); + }), + + /** + * Resolve the per-session filtered CDP endpoint directly from the + * UI (no MCP round-trip). Used by the Connect dialog to show a + * copy-ready URL and example commands for external browser MCPs. 
+ */ + getCdpEndpointForSession: publicProcedure + .input(z.object({ sessionId: z.string() })) + .query(async ({ input }) => { + const binding = bindingStore.getBySessionId(input.sessionId); + if (!binding) { + return { available: false as const, reason: "not-bound" as const }; + } + const { browserManager } = await import( + "main/lib/browser/browser-manager" + ); + const targetId = browserManager.getCdpTargetId(binding.paneId); + if (!targetId) { + return { + available: false as const, + reason: "target-not-ready" as const, + }; + } + const { resolveCdpPort } = await import( + "main/lib/browser-mcp-bridge/cdp-port" + ); + const cdpPort = await resolveCdpPort(); + if (!cdpPort) { + return { + available: false as const, + reason: "cdp-disabled" as const, + }; + } + const { getBrowserMcpBridge } = await import( + "main/lib/browser-mcp-bridge/server" + ); + const { getGlobalBrowserUseConfigPath } = await import( + "main/lib/browser-mcp-bridge/cdp-gateway" + ); + const bridge = getBrowserMcpBridge(); + if (!bridge) { + return { + available: false as const, + reason: "bridge-not-running" as const, + }; + } + // The URL is the same for every LLM session; per-connection + // peer-PID resolution is how the gateway knows which pane + // to route to. That is why registering these MCPs once is + // enough even across Superset restarts, pane rebindings, + // and new terminal panes. 
+ return { + available: true as const, + paneId: binding.paneId, + targetId, + httpBase: `http://127.0.0.1:${bridge.port}`, + wsEndpoint: `ws://127.0.0.1:${bridge.port}/devtools/page/${targetId}`, + browserUseConfigPath: getGlobalBrowserUseConfigPath(), + }; + }), + + onBindingsChanged: publicProcedure.subscription(() => { + return observable((emit) => { + emit.next(bindingStore.list()); + const off = bindingStore.onChange((list) => emit.next(list)); + return () => { + off(); + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/browser-permissions/index.ts b/apps/desktop/src/lib/trpc/routers/browser-permissions/index.ts new file mode 100644 index 00000000000..c5f3e469d6f --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/browser-permissions/index.ts @@ -0,0 +1,66 @@ +import { observable } from "@trpc/server/observable"; +import { + PERMISSION_TOGGLE_KEYS, + PERMISSION_TOGGLE_META, + type PermissionConfig, + type PermissionToggleKey, + permissionStore, +} from "main/lib/browser-mcp-bridge/permissions"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +const togglesSchema = z + .record(z.string(), z.boolean()) + .transform((v): Partial> => { + const out: Partial> = {}; + for (const k of PERMISSION_TOGGLE_KEYS) { + if (k in v) out[k] = v[k]; + } + return out; + }); + +export const createBrowserPermissionsRouter = () => { + return router({ + getConfig: publicProcedure.query(() => permissionStore.getConfig()), + getToggleMeta: publicProcedure.query(() => PERMISSION_TOGGLE_META), + setActive: publicProcedure + .input(z.object({ presetId: z.string() })) + .mutation(({ input }) => { + permissionStore.setActive(input.presetId); + return permissionStore.getConfig(); + }), + savePreset: publicProcedure + .input( + z.object({ + id: z.string().optional(), + name: z.string().min(1).max(64), + toggles: togglesSchema, + }), + ) + .mutation(({ input }) => { + return permissionStore.savePreset({ + id: input.id, + name: input.name, 
+ toggles: input.toggles, + }); + }), + deletePreset: publicProcedure + .input(z.object({ id: z.string() })) + .mutation(({ input }) => { + permissionStore.deletePreset(input.id); + return permissionStore.getConfig(); + }), + onChange: publicProcedure.subscription(() => { + return observable((emit) => { + const handler = (config: PermissionConfig) => emit.next(config); + permissionStore.on("change", handler); + // Prime the subscription so clients get current state + // without an extra query round-trip. + emit.next(permissionStore.getConfig()); + return () => { + permissionStore.off("change", handler); + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/browser/browser.ts b/apps/desktop/src/lib/trpc/routers/browser/browser.ts index 50681573e85..5f6552dd67a 100644 --- a/apps/desktop/src/lib/trpc/routers/browser/browser.ts +++ b/apps/desktop/src/lib/trpc/routers/browser/browser.ts @@ -1,6 +1,12 @@ +import { + SITE_PERMISSION_KINDS, + SITE_PERMISSION_VALUES, +} from "@superset/local-db"; import { observable } from "@trpc/server/observable"; -import { session } from "electron"; +import { session, webContents } from "electron"; +import { requestMediaAccess } from "lib/electron/request-media-access"; import { browserManager } from "main/lib/browser/browser-manager"; +import { browserSitePermissionManager } from "main/lib/browser/browser-site-permission-manager"; import { z } from "zod"; import { publicProcedure, router } from "../.."; @@ -20,6 +26,104 @@ export const createBrowserRouter = () => { return { success: true }; }), + registerTab: publicProcedure + .input( + z.object({ + paneId: z.string(), + tabId: z.string(), + webContentsId: z.number(), + }), + ) + .mutation(({ input }) => { + browserManager.registerTab( + input.paneId, + input.tabId, + input.webContentsId, + ); + return { success: true }; + }), + + unregisterTab: publicProcedure + .input(z.object({ paneId: z.string(), tabId: z.string() })) + .mutation(({ input }) => { + 
browserManager.unregisterTab(input.paneId, input.tabId); + return { success: true }; + }), + + /** + * Subscribe to external-create-tab requests for a pane. The CDP + * gateway emits one of these whenever an external MCP issues + * `Target.createTarget`; the renderer-side BrowserPane picks it + * up and creates a real tab in its registry, which then + * registers back via registerTab and populates the pane's tab + * target set. + */ + onCreateTabRequested: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ + url: string; + requestId?: string; + background?: boolean; + }>((emit) => { + const handler = (data: { + url: string; + requestId?: string; + background?: boolean; + }) => emit.next(data); + browserManager.on(`create-tab-requested:${input.paneId}`, handler); + return () => { + browserManager.off(`create-tab-requested:${input.paneId}`, handler); + }; + }); + }), + + /** + * Subscribe to MCP-driven tab activation requests for a pane. + * The CDP filter emits these when an external MCP sends + * Target.activateTarget or Page.bringToFront; the renderer + * flips its tab-bar UI to match. tabId=null means the pane's + * primary (the non-secondary tab managed by + * usePersistentWebview). + */ + onActivateTabRequested: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ tabId: string | null }>((emit) => { + const handler = (data: { tabId: string | null }) => emit.next(data); + browserManager.on(`activate-tab-requested:${input.paneId}`, handler); + return () => { + browserManager.off( + `activate-tab-requested:${input.paneId}`, + handler, + ); + }; + }); + }), + + /** + * Called by the renderer after spawning the secondary tab so + * the gateway can correlate concurrent Target.createTarget + * requests with their respective new tab targetIds. `requestId` + * came in on the matching create-tab-requested event. 
+ */ + acknowledgeTabCreated: publicProcedure + .input( + z.object({ + paneId: z.string(), + requestId: z.string(), + tabId: z.string(), + }), + ) + .mutation(({ input }) => { + browserManager.acknowledgeTabCreated( + input.paneId, + input.requestId, + input.tabId, + ); + return { success: true }; + }), + navigate: publicProcedure .input(z.object({ paneId: z.string(), url: z.string() })) .mutation(({ input }) => { @@ -63,6 +167,42 @@ export const createBrowserRouter = () => { return { base64 }; }), + print: publicProcedure + .input(z.object({ paneId: z.string() })) + .mutation(({ input }) => { + browserManager.print(input.paneId); + return { success: true }; + }), + + onDownload: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ + kind: "started" | "finished"; + filename: string; + targetPath: string; + url?: string; + state?: string; + }>((emit) => { + const started = (data: { + filename: string; + targetPath: string; + url: string; + }) => emit.next({ kind: "started", ...data }); + const finished = (data: { + filename: string; + targetPath: string; + state: string; + }) => emit.next({ kind: "finished", ...data }); + browserManager.on(`download-started:${input.paneId}`, started); + browserManager.on(`download-finished:${input.paneId}`, finished); + return () => { + browserManager.off(`download-started:${input.paneId}`, started); + browserManager.off(`download-finished:${input.paneId}`, finished); + }; + }); + }), + evaluateJS: publicProcedure .input(z.object({ paneId: z.string(), code: z.string() })) .mutation(async ({ input }) => { @@ -115,6 +255,32 @@ export const createBrowserRouter = () => { }); }), + /** Global subscription for new-window events from any browser pane. 
*/ + onAnyNewWindow: publicProcedure.subscription(() => { + return observable<{ paneId: string; url: string }>((emit) => { + const handler = (data: { paneId: string; url: string }) => { + emit.next(data); + }; + browserManager.on("new-window", handler); + return () => { + browserManager.off("new-window", handler); + }; + }); + }), + + /** Global subscription for HTML5 fullscreen enter/leave from any browser pane. */ + onFullscreenChange: publicProcedure.subscription(() => { + return observable<{ paneId: string; isFullscreen: boolean }>((emit) => { + const handler = (data: { paneId: string; isFullscreen: boolean }) => { + emit.next(data); + }; + browserManager.on("fullscreen-change", handler); + return () => { + browserManager.off("fullscreen-change", handler); + }; + }); + }), + onContextMenuAction: publicProcedure .input(z.object({ paneId: z.string() })) .subscription(({ input }) => { @@ -136,6 +302,108 @@ export const createBrowserRouter = () => { return { success: true }; }), + findInPage: publicProcedure + .input( + z.object({ + paneId: z.string(), + text: z.string(), + forward: z.boolean().optional(), + findNext: z.boolean().optional(), + matchCase: z.boolean().optional(), + }), + ) + .mutation(({ input }) => { + const requestId = browserManager.findInPage(input.paneId, input.text, { + forward: input.forward, + findNext: input.findNext, + matchCase: input.matchCase, + }); + return { requestId }; + }), + + stopFindInPage: publicProcedure + .input( + z.object({ + paneId: z.string(), + action: z + .enum(["clearSelection", "keepSelection", "activateSelection"]) + .optional(), + }), + ) + .mutation(({ input }) => { + browserManager.stopFindInPage( + input.paneId, + input.action ?? 
"clearSelection", + ); + return { success: true }; + }), + + onFoundInPage: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ + requestId: number; + activeMatchOrdinal: number; + matches: number; + finalUpdate: boolean; + }>((emit) => { + const handler = (data: { + requestId: number; + activeMatchOrdinal: number; + matches: number; + finalUpdate: boolean; + }) => { + emit.next(data); + }; + browserManager.on(`found-in-page:${input.paneId}`, handler); + return () => { + browserManager.off(`found-in-page:${input.paneId}`, handler); + }; + }); + }), + + onFindRequested: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ type: "open" | "escape" }>((emit) => { + const openHandler = () => emit.next({ type: "open" }); + const escapeHandler = () => emit.next({ type: "escape" }); + browserManager.on(`find-requested:${input.paneId}`, openHandler); + browserManager.on(`find-escape:${input.paneId}`, escapeHandler); + return () => { + browserManager.off(`find-requested:${input.paneId}`, openHandler); + browserManager.off(`find-escape:${input.paneId}`, escapeHandler); + }; + }); + }), + + setZoomLevel: publicProcedure + .input(z.object({ paneId: z.string(), level: z.number() })) + .mutation(({ input }) => { + const wc = browserManager.getWebContents(input.paneId); + if (!wc) return { success: false }; + wc.setZoomLevel(input.level); + return { success: true }; + }), + + onZoomChanged: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ zoomLevel: number }>((emit) => { + let lastLevel: number | null = null; + const interval = setInterval(() => { + const wc = browserManager.getWebContents(input.paneId); + if (!wc) return; + const level = wc.getZoomLevel(); + if (level !== lastLevel) { + lastLevel = level; + emit.next({ zoomLevel: level }); + } + }, 300); + return () => clearInterval(interval); + 
}); + }), + getPageInfo: publicProcedure .input(z.object({ paneId: z.string() })) .query(({ input }) => { @@ -150,6 +418,89 @@ export const createBrowserRouter = () => { }; }), + getSitePermissions: publicProcedure + .input(z.object({ url: z.string() })) + .query(({ input }) => { + return browserSitePermissionManager.getPermissionsForUrl(input.url); + }), + + setSitePermission: publicProcedure + .input( + z.object({ + origin: z.string(), + kind: z.enum(SITE_PERMISSION_KINDS), + value: z.enum(SITE_PERMISSION_VALUES), + }), + ) + .mutation(async ({ input }) => { + const sitePermissions = browserSitePermissionManager.setPermission( + input.origin, + input.kind, + input.value, + ); + + const mediaAccess = + input.value === "allow" && + (input.kind === "microphone" || input.kind === "camera") + ? await requestMediaAccess(input.kind) + : null; + + return { + ...sitePermissions, + mediaAccess, + }; + }), + + resetSitePermissions: publicProcedure + .input(z.object({ origin: z.string() })) + .mutation(({ input }) => { + browserSitePermissionManager.resetPermissions(input.origin); + return { success: true }; + }), + + onSitePermissionRequested: publicProcedure + .input(z.object({ paneId: z.string() })) + .subscription(({ input }) => { + return observable<{ + paneId: string; + origin: string; + permissions: ("microphone" | "camera")[]; + }>((emit) => { + const handler = (event: { + paneId: string; + origin: string; + permissions: ("microphone" | "camera")[]; + }) => { + emit.next(event); + }; + browserSitePermissionManager.on( + `permission-requested:${input.paneId}`, + handler, + ); + return () => { + browserSitePermissionManager.off( + `permission-requested:${input.paneId}`, + handler, + ); + }; + }); + }), + + showElementContextMenu: publicProcedure + .input( + z.object({ + webContentsId: z.number(), + x: z.number(), + y: z.number(), + }), + ) + .mutation(({ input }) => { + const wc = webContents.fromId(input.webContentsId); + if (!wc) return { success: false }; + 
browserManager.showContextMenuForWebContents(wc, input.x, input.y); + return { success: true }; + }), + clearBrowsingData: publicProcedure .input( z.object({ diff --git a/apps/desktop/src/lib/trpc/routers/changes/branches.ts b/apps/desktop/src/lib/trpc/routers/changes/branches.ts index 8283e4b4e29..dd67361aa15 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/branches.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/branches.ts @@ -1,4 +1,7 @@ +import { access } from "node:fs/promises"; +import { join, resolve } from "node:path"; import { worktrees } from "@superset/local-db"; +import { TRPCError } from "@trpc/server"; import { eq } from "drizzle-orm"; import { localDb } from "main/lib/local-db"; import type { SimpleGit } from "simple-git"; @@ -11,12 +14,70 @@ import { } from "../workspaces/utils/base-branch-config"; import { getCurrentBranch } from "../workspaces/utils/git"; import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; -import { gitSwitchBranch } from "./security/git-commands"; -import { - assertRegisteredWorktree, - getRegisteredWorktree, -} from "./security/path-validation"; -import { clearStatusCacheForWorktree } from "./utils/status-cache"; +import { gitCreateBranch, gitSwitchBranch } from "./security/git-commands"; +import { assertRegisteredWorktree } from "./security/path-validation"; +import { clearWorktreeStatusCaches } from "./utils/worktree-status-caches"; + +const DEFAULT_REF_SEARCH_LIMIT = 50; +const MAX_REF_SEARCH_LIMIT = 200; +const GIT_PROGRESS_OPERATIONS = [ + { kind: "merge", path: "MERGE_HEAD" }, + { kind: "cherry-pick", path: "CHERRY_PICK_HEAD" }, + { kind: "revert", path: "REVERT_HEAD" }, + { kind: "bisect", path: "BISECT_LOG" }, +] as const; + +type BranchProgressOperation = + | "merge" + | "rebase" + | "cherry-pick" + | "revert" + | "bisect"; + +type SearchableRef = { + name: string; + displayName: string; + ref: string; + kind: "branch" | "tag"; + scope: "local" | "remote" | "tag"; + lastCommitDate: 
number; + shortHash: string | null; + authorName: string | null; + subject: string | null; + checkedOutPath: string | null; +}; + +type ParsedRefEntry = { + name: string; + shortHash: string | null; + authorName: string | null; + subject: string | null; + lastCommitDate: number; +}; + +const REF_FIELD_SEPARATOR = "\u001f"; +const REF_RECORD_SEPARATOR = "\u001e"; + +function normalizeBranchRef(branch: string): string { + if (branch.startsWith("refs/heads/")) { + return branch.slice("refs/heads/".length); + } + if (branch.startsWith("refs/remotes/origin/")) { + return branch.slice("refs/remotes/origin/".length); + } + if (branch.startsWith("remotes/origin/")) { + return branch.slice("remotes/origin/".length); + } + return branch; +} + +async function assertWorktreePathExists(worktreePath: string): Promise { + if (await pathExists(worktreePath)) return; + throw new TRPCError({ + code: "NOT_FOUND", + message: `Worktree path does not exist: ${worktreePath}`, + }); +} export const createBranchesRouter = () => { return router({ @@ -34,6 +95,7 @@ export const createBranchesRouter = () => { currentBranch: string | null; }> => { assertRegisteredWorktree(input.worktreePath); + await assertWorktreePathExists(input.worktreePath); const git = await getSimpleGitWithShellPath(input.worktreePath); @@ -92,6 +154,71 @@ export const createBranchesRouter = () => { }, ), + searchRefs: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + search: z.string().default(""), + limit: z.number().int().min(1).max(MAX_REF_SEARCH_LIMIT).optional(), + includeTags: z.boolean().default(true), + }), + ) + .query( + async ({ + input, + }): Promise<{ + refs: SearchableRef[]; + defaultBranch: string; + currentBranch: string | null; + }> => { + assertRegisteredWorktree(input.worktreePath); + + const git = await getSimpleGitWithShellPath(input.worktreePath); + const currentBranch = await getCurrentBranch(input.worktreePath); + const checkedOutBranches = await getCheckedOutBranches( + git, + 
input.worktreePath, + ); + const refs = await getSearchableRefs(git, { + search: input.search, + includeTags: input.includeTags, + }); + const remoteBranchNames = refs + .filter((ref) => ref.kind === "branch" && ref.scope === "remote") + .map((ref) => ref.name); + const defaultBranch = await getDefaultBranch(git, remoteBranchNames); + + const sortedRefs = refs.sort((a, b) => { + if (a.kind !== b.kind) return a.kind === "branch" ? -1 : 1; + if (a.kind === "branch" && b.kind === "branch") { + if (a.name === currentBranch) return -1; + if (b.name === currentBranch) return 1; + if (a.name === defaultBranch) return -1; + if (b.name === defaultBranch) return 1; + if (a.scope !== b.scope) return a.scope === "local" ? -1 : 1; + } + if (a.lastCommitDate !== b.lastCommitDate) { + return b.lastCommitDate - a.lastCommitDate; + } + return a.displayName.localeCompare(b.displayName); + }); + + return { + refs: sortedRefs + .slice(0, input.limit ?? DEFAULT_REF_SEARCH_LIMIT) + .map((ref) => ({ + ...ref, + checkedOutPath: + ref.kind === "branch" + ? (checkedOutBranches[ref.name] ?? null) + : null, + })), + defaultBranch, + currentBranch, + }; + }, + ), + switchBranch: publicProcedure .input( z.object({ @@ -100,27 +227,70 @@ export const createBranchesRouter = () => { }), ) .mutation(async ({ input }): Promise<{ success: boolean }> => { - const worktree = getRegisteredWorktree(input.worktreePath); - await gitSwitchBranch(input.worktreePath, input.branch); - - const gitStatus = worktree.gitStatus - ? 
{ ...worktree.gitStatus, branch: input.branch } - : null; - - localDb - .update(worktrees) - .set({ - branch: input.branch, - baseBranch: null, - gitStatus, - }) - .where(eq(worktrees.path, input.worktreePath)) - .run(); - - clearStatusCacheForWorktree(input.worktreePath); + await assertWorktreePathExists(input.worktreePath); + const branch = normalizeBranchRef(input.branch); + await gitSwitchBranch(input.worktreePath, branch); + const currentBranch = + (await getCurrentBranch(input.worktreePath)) ?? branch; + persistWorktreeBranch(input.worktreePath, currentBranch); + + clearWorktreeStatusCaches(input.worktreePath); return { success: true }; }), + getBranchGuardState: publicProcedure + .input(z.object({ worktreePath: z.string() })) + .query( + async ({ + input, + }): Promise<{ + operationInProgress: BranchProgressOperation | null; + }> => { + assertRegisteredWorktree(input.worktreePath); + + const git = await getSimpleGitWithShellPath(input.worktreePath); + + return { + operationInProgress: await detectGitProgressOperation( + git, + input.worktreePath, + ), + }; + }, + ), + + createBranch: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + branch: z.string(), + startPoint: z.string().nullish(), + }), + ) + .mutation( + async ({ input }): Promise<{ success: boolean; branch: string }> => { + assertRegisteredWorktree(input.worktreePath); + + const git = await getSimpleGitWithShellPath(input.worktreePath); + const branchSummary = await git.branchLocal(); + if (branchSummary.all.includes(input.branch)) { + throw new Error(`Branch "${input.branch}" already exists.`); + } + + await gitCreateBranch( + input.worktreePath, + input.branch, + input.startPoint ?? undefined, + ); + const currentBranch = + (await getCurrentBranch(input.worktreePath)) ?? 
input.branch; + persistWorktreeBranch(input.worktreePath, currentBranch); + + clearWorktreeStatusCaches(input.worktreePath); + return { success: true, branch: currentBranch }; + }, + ), + updateBaseBranch: publicProcedure .input( z.object({ @@ -150,13 +320,16 @@ export const createBranchesRouter = () => { }); } - localDb - .update(worktrees) - .set({ baseBranch: input.baseBranch }) - .where(eq(worktrees.path, input.worktreePath)) - .run(); + const persistedWorktree = getPersistedWorktree(input.worktreePath); + if (persistedWorktree) { + localDb + .update(worktrees) + .set({ baseBranch: input.baseBranch }) + .where(eq(worktrees.path, input.worktreePath)) + .run(); + } - clearStatusCacheForWorktree(input.worktreePath); + clearWorktreeStatusCaches(input.worktreePath); return { success: true }; }), }); @@ -236,3 +409,244 @@ async function getCheckedOutBranches( return checkedOutBranches; } + +function getPersistedWorktree(worktreePath: string) { + return localDb + .select() + .from(worktrees) + .where(eq(worktrees.path, worktreePath)) + .get(); +} + +async function pathExists(path: string): Promise { + try { + await access(path); + return true; + } catch { + return false; + } +} + +async function detectGitProgressOperation( + git: SimpleGit, + worktreePath: string, +): Promise { + let gitDirPath: string; + + try { + const gitDir = (await git.revparse(["--git-dir"])).trim(); + gitDirPath = resolve(worktreePath, gitDir); + } catch { + return null; + } + + if ( + (await pathExists(join(gitDirPath, "rebase-merge"))) || + (await pathExists(join(gitDirPath, "rebase-apply"))) + ) { + return "rebase"; + } + + for (const candidate of GIT_PROGRESS_OPERATIONS) { + if (await pathExists(join(gitDirPath, candidate.path))) { + return candidate.kind; + } + } + + return null; +} + +function persistWorktreeBranch(worktreePath: string, branch: string): void { + const persistedWorktree = getPersistedWorktree(worktreePath); + if (!persistedWorktree) { + return; + } + + const gitStatus = 
persistedWorktree.gitStatus + ? { ...persistedWorktree.gitStatus, branch } + : null; + + localDb + .update(worktrees) + .set({ + branch, + baseBranch: null, + gitStatus, + }) + .where(eq(worktrees.path, worktreePath)) + .run(); +} + +async function getSearchableRefs( + git: SimpleGit, + { + search, + includeTags, + }: { + search: string; + includeTags: boolean; + }, +): Promise { + const searchLower = search.trim().toLowerCase(); + const refs: SearchableRef[] = []; + + try { + for (const localBranch of await getRefEntries(git, { + refPath: "refs/heads/", + dateField: "committerdate", + authorField: "authorname", + })) { + if (!matchesSearch(localBranch, searchLower)) continue; + + refs.push({ + name: localBranch.name, + displayName: localBranch.name, + ref: localBranch.name, + kind: "branch", + scope: "local", + lastCommitDate: localBranch.lastCommitDate, + shortHash: localBranch.shortHash, + authorName: localBranch.authorName, + subject: localBranch.subject, + checkedOutPath: null, + }); + } + } catch {} + + try { + for (const remoteBranch of await getRefEntries(git, { + refPath: "refs/remotes/origin/", + dateField: "committerdate", + authorField: "authorname", + })) { + if (remoteBranch.name === "origin/HEAD") continue; + const canonicalName = remoteBranch.name.startsWith("origin/") + ? remoteBranch.name.replace("origin/", "") + : remoteBranch.name; + const displayName = remoteBranch.name.startsWith("origin/") + ? 
remoteBranch.name + : `origin/${remoteBranch.name}`; + if ( + !matchesSearch( + { ...remoteBranch, name: canonicalName, displayName }, + searchLower, + ) + ) { + continue; + } + + refs.push({ + name: canonicalName, + displayName, + ref: displayName, + kind: "branch", + scope: "remote", + lastCommitDate: remoteBranch.lastCommitDate, + shortHash: remoteBranch.shortHash, + authorName: remoteBranch.authorName, + subject: remoteBranch.subject, + checkedOutPath: null, + }); + } + } catch {} + + if (includeTags) { + try { + for (const tag of await getRefEntries(git, { + refPath: "refs/tags/", + dateField: "creatordate", + authorField: "creatorname", + })) { + if (!matchesSearch(tag, searchLower)) continue; + + refs.push({ + name: tag.name, + displayName: tag.name, + ref: `refs/tags/${tag.name}`, + kind: "tag", + scope: "tag", + lastCommitDate: tag.lastCommitDate, + shortHash: tag.shortHash, + authorName: tag.authorName, + subject: tag.subject, + checkedOutPath: null, + }); + } + } catch {} + } + + return refs; +} + +async function getRefEntries( + git: SimpleGit, + { + refPath, + dateField, + authorField, + }: { + refPath: string; + dateField: "committerdate" | "creatordate"; + authorField: "authorname" | "creatorname"; + }, +): Promise { + const output = await git.raw([ + "for-each-ref", + `--sort=-${dateField}`, + `--format=%(refname:short)${REF_FIELD_SEPARATOR}%(objectname:short)${REF_FIELD_SEPARATOR}%(${authorField})${REF_FIELD_SEPARATOR}%(subject)${REF_FIELD_SEPARATOR}%(${dateField}:unix)${REF_RECORD_SEPARATOR}`, + refPath, + ]); + + return output + .split(REF_RECORD_SEPARATOR) + .map((line) => line.trim()) + .filter(Boolean) + .map((line) => { + const [ + name = "", + shortHash = "", + authorName = "", + subject = "", + timestamp = "0", + ] = line.split(REF_FIELD_SEPARATOR); + const parsedTimestamp = Number.parseInt(timestamp, 10); + + return { + name, + shortHash: normalizeRefField(shortHash), + authorName: normalizeRefField(authorName), + subject: 
normalizeRefField(subject), + lastCommitDate: Number.isNaN(parsedTimestamp) + ? 0 + : parsedTimestamp * 1000, + }; + }) + .filter((entry) => entry.name.length > 0); +} + +function normalizeRefField(value: string): string | null { + const normalized = value.trim(); + return normalized.length > 0 ? normalized : null; +} + +function matchesSearch( + ref: + | ParsedRefEntry + | (ParsedRefEntry & { displayName?: string }) + | SearchableRef, + searchLower: string, +): boolean { + if (!searchLower) { + return true; + } + + return [ + ref.name, + "displayName" in ref ? ref.displayName : null, + ref.shortHash, + ref.authorName, + ref.subject, + ] + .filter((value): value is string => Boolean(value)) + .some((value) => value.toLowerCase().includes(searchLower)); +} diff --git a/apps/desktop/src/lib/trpc/routers/changes/file-contents.ts b/apps/desktop/src/lib/trpc/routers/changes/file-contents.ts index f625d716294..6c085dcd4af 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/file-contents.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/file-contents.ts @@ -4,9 +4,13 @@ import type { SimpleGit } from "simple-git"; import { z } from "zod"; import { publicProcedure, router } from "../.."; import { toRegisteredWorktreeRelativePath } from "../workspace-fs-service"; -import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; +import { + execGitWithShellPathBuffer, + getSimpleGitWithShellPath, +} from "../workspaces/utils/git-client"; const MAX_FILE_SIZE = 2 * 1024 * 1024; +const MAX_BINARY_FILE_SIZE = 10 * 1024 * 1024; export const createFileContentsRouter = () => { return router({ @@ -51,6 +55,46 @@ export const createFileContentsRouter = () => { }; }), + readGitFileBinary: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + absolutePath: z.string(), + ref: z.string().default("HEAD"), + }), + ) + .query(async ({ input }): Promise<{ content: string | null }> => { + const relativePath = toRegisteredWorktreeRelativePath( + 
input.worktreePath, + input.absolutePath, + ); + const spec = `${input.ref}:${relativePath}`; + const git = await getSimpleGitWithShellPath(input.worktreePath); + + try { + const sizeOutput = await git.raw(["cat-file", "-s", spec]); + const blobSize = Number.parseInt(sizeOutput.trim(), 10); + if (!Number.isNaN(blobSize) && blobSize > MAX_BINARY_FILE_SIZE) { + return { content: null }; + } + } catch { + return { content: null }; + } + + try { + const { stdout } = await execGitWithShellPathBuffer( + ["cat-file", "-p", spec], + { + cwd: input.worktreePath, + maxBuffer: MAX_BINARY_FILE_SIZE, + }, + ); + return { content: stdout.toString("base64") }; + } catch { + return { content: null }; + } + }), + getGitOriginalContent: publicProcedure .input( z.object({ diff --git a/apps/desktop/src/lib/trpc/routers/changes/git-blame.ts b/apps/desktop/src/lib/trpc/routers/changes/git-blame.ts new file mode 100644 index 00000000000..2306484c132 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/changes/git-blame.ts @@ -0,0 +1,249 @@ +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { toRegisteredWorktreeRelativePath } from "../workspace-fs-service"; +import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; +import { + type GitHubCommitAuthor, + makeGitHubCommitAuthorCacheKey, + readCachedGitHubCommitAuthor, +} from "../workspaces/utils/github/cache"; +import { + extractNwoFromUrl, + getRepoContext, +} from "../workspaces/utils/github/repo-context"; +import { execWithShellEnv } from "../workspaces/utils/shell-env"; +import { assertRegisteredWorktree } from "./security/path-validation"; + +export interface BlameEntry { + line: number; + commitHash: string; + author: string; + timestamp: number; + summary: string; +} + +const GitHubCommitResponseSchema = z.object({ + author: z + .object({ + login: z.string().optional(), + avatar_url: z.string().optional(), + }) + .nullable() + .optional(), +}); + +function isSafeAvatarUrl(url: 
string): boolean { + try { + const parsed = new URL(url); + return parsed.protocol === "https:"; + } catch { + return false; + } +} + +function parseJsonOrNull(stdout: string): unknown | null { + try { + return JSON.parse(stdout) as unknown; + } catch { + return null; + } +} + +function getRepoCandidates( + repoContext: Awaited>, +): string[] { + if (!repoContext) { + return []; + } + + return Array.from( + new Set( + [repoContext.repoUrl, repoContext.upstreamUrl] + .map((url) => extractNwoFromUrl(url)) + .filter((value): value is string => Boolean(value)), + ), + ); +} + +async function fetchGitHubCommitAuthorForRepo({ + worktreePath, + repoNameWithOwner, + commitHash, +}: { + worktreePath: string; + repoNameWithOwner: string; + commitHash: string; +}): Promise { + const cacheKey = makeGitHubCommitAuthorCacheKey({ + repoNameWithOwner, + commitHash, + }); + + return readCachedGitHubCommitAuthor(cacheKey, async () => { + try { + const { stdout } = await execWithShellEnv( + "gh", + ["api", `repos/${repoNameWithOwner}/commits/${commitHash}`], + { cwd: worktreePath }, + ); + const raw = parseJsonOrNull(stdout); + if (raw === null) { + return null; + } + + const parsed = GitHubCommitResponseSchema.safeParse(raw); + if (!parsed.success) { + return null; + } + + const login = parsed.data.author?.login?.trim() || null; + const avatarUrl = + parsed.data.author?.avatar_url && + isSafeAvatarUrl(parsed.data.author.avatar_url) + ? 
parsed.data.author.avatar_url + : null; + + if (!login && !avatarUrl) { + return null; + } + + return { login, avatarUrl }; + } catch { + return null; + } + }); +} + +async function getGitHubCommitAuthor({ + worktreePath, + commitHash, +}: { + worktreePath: string; + commitHash: string; +}): Promise { + const repoContext = await getRepoContext(worktreePath); + + for (const repoNameWithOwner of getRepoCandidates(repoContext)) { + const author = await fetchGitHubCommitAuthorForRepo({ + worktreePath, + repoNameWithOwner, + commitHash, + }); + if (author) { + return author; + } + } + + return null; +} + +function parseGitBlamePorcelain(output: string): BlameEntry[] { + const lines = output.split("\n"); + const commitCache = new Map< + string, + { author: string; timestamp: number; summary: string } + >(); + const result: BlameEntry[] = []; + + let i = 0; + while (i < lines.length) { + const header = lines[i]; + if (!header || header.length < 40) { + i++; + continue; + } + + const commitHash = header.substring(0, 40); + if (!/^[0-9a-f]{40}$/.test(commitHash)) { + i++; + continue; + } + + const parts = header.split(" "); + const finalLine = Number.parseInt(parts[2] ?? 
"", 10); + + i++; + + let author = ""; + let timestamp = 0; + let summary = ""; + + if (!commitCache.has(commitHash)) { + while (i < lines.length && !lines[i].startsWith("\t")) { + const line = lines[i]; + if (line.startsWith("author ")) { + author = line.substring(7); + } else if (line.startsWith("author-time ")) { + timestamp = Number.parseInt(line.substring(12), 10); + } else if (line.startsWith("summary ")) { + summary = line.substring(8); + } + i++; + } + commitCache.set(commitHash, { author, timestamp, summary }); + } else { + while (i < lines.length && !lines[i].startsWith("\t")) { + i++; + } + // biome-ignore lint/style/noNonNullAssertion: commitHash is guaranteed to exist in cache at this point + const cached = commitCache.get(commitHash)!; + author = cached.author; + timestamp = cached.timestamp; + summary = cached.summary; + } + + // skip the tab+content line + i++; + + if (!Number.isNaN(finalLine)) { + result.push({ line: finalLine, commitHash, author, timestamp, summary }); + } + } + + return result; +} + +export const createGitBlameRouter = () => { + return router({ + getGitBlame: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + absolutePath: z.string(), + }), + ) + .query(async ({ input }): Promise<{ entries: BlameEntry[] }> => { + assertRegisteredWorktree(input.worktreePath); + + const filePath = toRegisteredWorktreeRelativePath( + input.worktreePath, + input.absolutePath, + ); + + const git = await getSimpleGitWithShellPath(input.worktreePath); + + try { + const output = await git.raw([ + "blame", + "--porcelain", + "--", + filePath, + ]); + return { entries: parseGitBlamePorcelain(output) }; + } catch { + return { entries: [] }; + } + }), + getGitHubCommitAuthor: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + commitHash: z.string().regex(/^[0-9a-f]{40}$/i), + }), + ) + .query(async ({ input }): Promise => { + assertRegisteredWorktree(input.worktreePath); + return getGitHubCommitAuthor(input); + }), + }); 
+}; diff --git a/apps/desktop/src/lib/trpc/routers/changes/git-operation-types.ts b/apps/desktop/src/lib/trpc/routers/changes/git-operation-types.ts new file mode 100644 index 00000000000..c8287bafdb5 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/changes/git-operation-types.ts @@ -0,0 +1,45 @@ +/** + * Shared types for git operation responses that carry non-fatal warnings and + * partial-failure classification. Frontend maps these to the unified + * GitOperationDialog for auto-repair notifications and sync-partial reporting. + */ + +export type GitOperationWarning = + | { + kind: "auto-published-upstream"; + /** Branch that was auto-published when a pull/sync found no upstream. */ + branch: string; + } + | { + kind: "post-push-fetch-failed"; + /** Stderr of the failed fetch after a successful push. */ + message: string; + } + | { + kind: "push-retargeted"; + /** Remote name the push was redirected to (usually the fork host for a PR). */ + remote: string; + /** Branch name on that remote. */ + targetBranch: string; + } + | { + kind: "post-checkout-hook-failed"; + /** Brief hook stderr. */ + message: string; + }; + +/** + * Thrown by sync() so the frontend can distinguish which stage (pull or push) + * failed and show a tailored dialog. Message is the underlying git stderr. + */ +export class GitSyncStageError extends Error { + readonly stage: "pull" | "push"; + readonly cause: unknown; + constructor(stage: "pull" | "push", cause: unknown) { + const message = cause instanceof Error ? 
cause.message : String(cause); + super(`[sync:${stage}] ${message}`); + this.name = "GitSyncStageError"; + this.stage = stage; + this.cause = cause; + } +} diff --git a/apps/desktop/src/lib/trpc/routers/changes/git-operations.ts b/apps/desktop/src/lib/trpc/routers/changes/git-operations.ts index 73826001d8b..55fc0432410 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/git-operations.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/git-operations.ts @@ -1,8 +1,18 @@ +import { generateTitleFromMessage } from "@superset/chat/server/desktop"; import { TRPCError } from "@trpc/server"; +import { callSmallModel } from "lib/ai/call-small-model"; import { z } from "zod"; import { publicProcedure, router } from "../.."; +import { + setBranchPullRequestBaseRepoConfig, + unsetBranchPullRequestBaseRepoConfig, +} from "../workspaces/utils/base-branch-config"; import { getCurrentBranch } from "../workspaces/utils/git"; import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; +import { + type GitOperationWarning, + GitSyncStageError, +} from "./git-operation-types"; import { isNoPullRequestFoundMessage, isUpstreamMissingError, @@ -20,7 +30,9 @@ import { mergePullRequest } from "./utils/merge-pull-request"; import { buildNewPullRequestUrl, findExistingOpenPRUrl, + resolvePullRequestBaseRepoSelection, } from "./utils/pull-request-discovery"; +import { normalizeGitHubRepoUrl } from "./utils/pull-request-url"; import { clearStatusCacheForWorktree } from "./utils/status-cache"; import { clearWorktreeStatusCaches } from "./utils/worktree-status-caches"; @@ -57,6 +69,8 @@ export const createGitOperationsRouter = () => { z.object({ worktreePath: z.string(), message: z.string(), + /** Pass --no-verify to bypass pre-commit / commit-msg hooks. 
*/ + skipHooks: z.boolean().optional(), }), ) .mutation( @@ -64,7 +78,8 @@ export const createGitOperationsRouter = () => { assertRegisteredWorktree(input.worktreePath); const git = await getGitWithShellPath(input.worktreePath); - const result = await git.commit(input.message); + const options = input.skipHooks ? ["--no-verify"] : undefined; + const result = await git.commit(input.message, options); clearStatusCacheForWorktree(input.worktreePath); return { success: true, hash: result.commit }; }, @@ -77,34 +92,59 @@ export const createGitOperationsRouter = () => { setUpstream: z.boolean().optional(), }), ) - .mutation(async ({ input }): Promise<{ success: boolean }> => { - assertRegisteredWorktree(input.worktreePath); - - const git = await getGitWithShellPath(input.worktreePath); - const hasUpstream = await hasUpstreamBranch(git); - const localBranch = await getLocalBranchOrThrow({ - worktreePath: input.worktreePath, - action: "push", - }); + .mutation( + async ({ + input, + }): Promise<{ + success: boolean; + warnings: GitOperationWarning[]; + }> => { + assertRegisteredWorktree(input.worktreePath); - if (input.setUpstream && !hasUpstream) { - await pushWithResolvedUpstream({ - git, - worktreePath: input.worktreePath, - localBranch, - }); - } else { - await pushCurrentBranch({ - git, + const git = await getGitWithShellPath(input.worktreePath); + const hasUpstream = await hasUpstreamBranch(git); + const localBranch = await getLocalBranchOrThrow({ worktreePath: input.worktreePath, - localBranch, + action: "push", }); - } + const warnings: GitOperationWarning[] = []; - await fetchCurrentBranch(git, input.worktreePath); - clearStatusCacheForWorktree(input.worktreePath); - return { success: true }; - }), + if (input.setUpstream && !hasUpstream) { + await pushWithResolvedUpstream({ + git, + worktreePath: input.worktreePath, + localBranch, + }); + warnings.push({ + kind: "auto-published-upstream", + branch: localBranch, + }); + } else { + await pushCurrentBranch({ + git, 
+ worktreePath: input.worktreePath, + localBranch, + }); + } + + try { + await fetchCurrentBranch(git, input.worktreePath); + } catch (fetchError) { + const message = + fetchError instanceof Error + ? fetchError.message + : String(fetchError); + console.warn( + "[git/push] post-push fetch failed (non-fatal):", + message, + ); + warnings.push({ kind: "post-push-fetch-failed", message }); + } + + clearStatusCacheForWorktree(input.worktreePath); + return { success: true, warnings }; + }, + ), pull: publicProcedure .input( @@ -138,45 +178,84 @@ export const createGitOperationsRouter = () => { worktreePath: z.string(), }), ) - .mutation(async ({ input }): Promise<{ success: boolean }> => { - assertRegisteredWorktree(input.worktreePath); + .mutation( + async ({ + input, + }): Promise<{ + success: boolean; + warnings: GitOperationWarning[]; + }> => { + assertRegisteredWorktree(input.worktreePath); - const git = await getGitWithShellPath(input.worktreePath); - try { - await git.pull(["--rebase"]); - } catch (error) { - const message = - error instanceof Error ? error.message : String(error); - if (isUpstreamMissingError(message)) { - const localBranch = await getLocalBranchOrThrow({ - worktreePath: input.worktreePath, - action: "push", - }); - await pushWithResolvedUpstream({ + const git = await getGitWithShellPath(input.worktreePath); + const warnings: GitOperationWarning[] = []; + + try { + await git.pull(["--rebase"]); + } catch (error) { + const message = + error instanceof Error ? 
error.message : String(error); + if (isUpstreamMissingError(message)) { + const localBranch = await getLocalBranchOrThrow({ + worktreePath: input.worktreePath, + action: "push", + }); + await pushWithResolvedUpstream({ + git, + worktreePath: input.worktreePath, + localBranch, + }); + warnings.push({ + kind: "auto-published-upstream", + branch: localBranch, + }); + try { + await fetchCurrentBranch(git, input.worktreePath); + } catch (fetchError) { + const fetchMessage = + fetchError instanceof Error + ? fetchError.message + : String(fetchError); + warnings.push({ + kind: "post-push-fetch-failed", + message: fetchMessage, + }); + } + clearStatusCacheForWorktree(input.worktreePath); + return { success: true, warnings }; + } + throw new GitSyncStageError("pull", error); + } + + const localBranch = await getLocalBranchOrThrow({ + worktreePath: input.worktreePath, + action: "push", + }); + try { + await pushCurrentBranch({ git, worktreePath: input.worktreePath, localBranch, }); + } catch (pushError) { + throw new GitSyncStageError("push", pushError); + } + try { await fetchCurrentBranch(git, input.worktreePath); - clearStatusCacheForWorktree(input.worktreePath); - return { success: true }; + } catch (fetchError) { + const fetchMessage = + fetchError instanceof Error + ? 
fetchError.message + : String(fetchError); + warnings.push({ + kind: "post-push-fetch-failed", + message: fetchMessage, + }); } - throw error; - } - - const localBranch = await getLocalBranchOrThrow({ - worktreePath: input.worktreePath, - action: "push", - }); - await pushCurrentBranch({ - git, - worktreePath: input.worktreePath, - localBranch, - }); - await fetchCurrentBranch(git, input.worktreePath); - clearStatusCacheForWorktree(input.worktreePath); - return { success: true }; - }), + clearStatusCacheForWorktree(input.worktreePath); + return { success: true, warnings }; + }, + ), fetch: publicProcedure .input(z.object({ worktreePath: z.string() })) @@ -193,10 +272,13 @@ export const createGitOperationsRouter = () => { z.object({ worktreePath: z.string(), allowOutOfDate: z.boolean().optional().default(false), + baseRepoUrl: z.string().url().optional(), }), ) .mutation( - async ({ input }): Promise<{ success: boolean; url: string }> => { + async ({ + input, + }): Promise<{ success: boolean; url: string; isExisting: boolean }> => { assertRegisteredWorktree(input.worktreePath); const git = await getGitWithShellPath(input.worktreePath); @@ -204,6 +286,23 @@ export const createGitOperationsRouter = () => { worktreePath: input.worktreePath, action: "create a pull request", }); + const normalizedBaseRepoUrl = input.baseRepoUrl + ? 
normalizeGitHubRepoUrl(input.baseRepoUrl) + : null; + if (normalizedBaseRepoUrl) { + const selection = await resolvePullRequestBaseRepoSelection({ + worktreePath: input.worktreePath, + branch, + preferredBaseRepoUrl: normalizedBaseRepoUrl, + }); + if (selection.selectedBaseRepoUrl === normalizedBaseRepoUrl) { + await setBranchPullRequestBaseRepoConfig({ + repoPath: input.worktreePath, + branch, + baseRepoUrl: normalizedBaseRepoUrl, + }); + } + } const trackingStatus = await getTrackingBranchStatus(git); const hasUpstream = trackingStatus.hasUpstream; @@ -258,7 +357,7 @@ export const createGitOperationsRouter = () => { if (existingPRUrl) { await fetchCurrentBranch(git, input.worktreePath); clearWorktreeStatusCaches(input.worktreePath); - return { success: true, url: existingPRUrl }; + return { success: true, url: existingPRUrl, isExisting: true }; } try { @@ -266,11 +365,12 @@ export const createGitOperationsRouter = () => { input.worktreePath, git, branch, + normalizedBaseRepoUrl, ); await fetchCurrentBranch(git, input.worktreePath); clearWorktreeStatusCaches(input.worktreePath); - return { success: true, url }; + return { success: true, url, isExisting: false }; } catch (error) { // If creation reports branch/tracking mismatch but an open PR exists, // recover by opening that existing PR instead of failing. 
@@ -280,13 +380,100 @@ export const createGitOperationsRouter = () => { if (recoveredPRUrl) { await fetchCurrentBranch(git, input.worktreePath); clearWorktreeStatusCaches(input.worktreePath); - return { success: true, url: recoveredPRUrl }; + return { + success: true, + url: recoveredPRUrl, + isExisting: true, + }; } throw error; } }, ), + resolveCreatePRBaseOptions: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + }), + ) + .mutation( + async ({ + input, + }): Promise<{ + baseRepoOptions: Awaited< + ReturnType + >["baseRepoOptions"]; + selectedBaseRepoUrl: string | null; + requiresChoice: boolean; + }> => { + assertRegisteredWorktree(input.worktreePath); + + const branch = await getLocalBranchOrThrow({ + worktreePath: input.worktreePath, + action: "create a pull request", + }); + const selection = await resolvePullRequestBaseRepoSelection({ + worktreePath: input.worktreePath, + branch, + }); + + return { + ...selection, + requiresChoice: + selection.selectedBaseRepoUrl === null && + selection.baseRepoOptions.length > 1, + }; + }, + ), + + updatePullRequestBaseRepo: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + baseRepoUrl: z.string().url().nullable(), + }), + ) + .mutation(async ({ input }): Promise<{ success: boolean }> => { + assertRegisteredWorktree(input.worktreePath); + + const branch = await getLocalBranchOrThrow({ + worktreePath: input.worktreePath, + action: "update the pull request base repository", + }); + const normalizedBaseRepoUrl = input.baseRepoUrl + ? 
normalizeGitHubRepoUrl(input.baseRepoUrl) + : null; + + if (normalizedBaseRepoUrl) { + const selection = await resolvePullRequestBaseRepoSelection({ + worktreePath: input.worktreePath, + branch, + preferredBaseRepoUrl: normalizedBaseRepoUrl, + }); + if (selection.selectedBaseRepoUrl !== normalizedBaseRepoUrl) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Invalid pull request base repository selection.", + }); + } + + await setBranchPullRequestBaseRepoConfig({ + repoPath: input.worktreePath, + branch, + baseRepoUrl: normalizedBaseRepoUrl, + }); + } else { + await unsetBranchPullRequestBaseRepoConfig({ + repoPath: input.worktreePath, + branch, + }); + } + + clearWorktreeStatusCaches(input.worktreePath); + return { success: true }; + }), + mergePR: publicProcedure .input( z.object({ @@ -337,5 +524,272 @@ export const createGitOperationsRouter = () => { } }, ), + + generateCommitMessage: publicProcedure + .input(z.object({ worktreePath: z.string() })) + .mutation(async ({ input }): Promise<{ message: string | null }> => { + assertRegisteredWorktree(input.worktreePath); + + const git = await getGitWithShellPath(input.worktreePath); + + // --------------------------------------------------------------------------- + // Hierarchical summarization (gptcommit-style): + // Phase 1 — Summarize each changed file independently (parallel) + // Phase 2 — Combine all summaries into a single commit message + // This avoids token-limit issues with large diffs and produces the + // most accurate results because no file content is truncated. 
+ // --------------------------------------------------------------------------- + + // Collect per-file diffs from staged, unstaged, and untracked sources + const [stagedStat, unstagedStat, statusSummary] = await Promise.all([ + git.diff(["--cached", "--stat", "--stat-width=200"]), + git.diff(["--stat", "--stat-width=200"]), + git.status(), + ]); + + interface FileChange { + path: string; + source: "staged" | "unstaged" | "untracked"; + diff: string | null; // null for untracked / binary + } + + const files: FileChange[] = []; + + // Staged files + const stagedFiles = statusSummary.staged; + if (stagedFiles.length > 0) { + const diffs = await Promise.all( + stagedFiles.map((f) => + git + .diff(["--cached", "--", f]) + .then((d) => d.trim() || null) + .catch(() => null), + ), + ); + for (let i = 0; i < stagedFiles.length; i++) { + files.push({ + path: stagedFiles[i], + source: "staged", + diff: diffs[i], + }); + } + } + + // Unstaged files (modified tracked files) + const unstagedFiles = statusSummary.modified.filter( + (f) => !stagedFiles.includes(f), + ); + if (unstagedFiles.length > 0) { + const diffs = await Promise.all( + unstagedFiles.map((f) => + git + .diff(["--", f]) + .then((d) => d.trim() || null) + .catch(() => null), + ), + ); + for (let i = 0; i < unstagedFiles.length; i++) { + files.push({ + path: unstagedFiles[i], + source: "unstaged", + diff: diffs[i], + }); + } + } + + // Untracked files (new, not yet added) + for (const f of statusSummary.not_added) { + files.push({ path: f, source: "untracked", diff: null }); + } + + if (files.length === 0) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "No changes to generate a commit message for.", + }); + } + + // Skip patterns — files that waste tokens without useful context + const SKIP_PATTERNS = [ + /\.lock$/, + /package-lock\.json$/, + /bun\.lock(b)?$/, + /yarn\.lock$/, + /pnpm-lock\.yaml$/, + /\.min\.(js|css)$/, + ]; + const isBinary = (path: string) => + 
/\.(png|jpe?g|gif|ico|svg|webp|woff2?|ttf|eot|mp[34]|mov|zip|tar|gz|pdf)$/i.test( + path, + ); + + const summarizableFiles: FileChange[] = []; + const skippedFileNames: string[] = []; + + for (const f of files) { + if (SKIP_PATTERNS.some((p) => p.test(f.path)) || isBinary(f.path)) { + skippedFileNames.push(f.path); + } else { + summarizableFiles.push(f); + } + } + + // ---- Phase 1: Summarize each file in parallel ------------------------- + + const PHASE1_INSTRUCTIONS = + "与えられたdiffを1行の日本語で要約してください。何が変わったかを簡潔に。要約のみを返してください。"; + const PER_FILE_MAX_CHARS = 4000; + + const summarizeFile = async (f: FileChange): Promise => { + // Files without diff (untracked) — just report the file name + if (!f.diff) { + return `${f.path}: 新規ファイル`; + } + + // Small diffs — no need to call LLM, include directly + if (f.diff.length < 300) { + return `${f.path}: ${f.diff}`; + } + + const truncatedDiff = + f.diff.length > PER_FILE_MAX_CHARS + ? `${f.diff.slice(0, PER_FILE_MAX_CHARS)}\n... (truncated)` + : f.diff; + + const { result } = await callSmallModel({ + invoke: async ({ model, providerId, providerName }) => + generateTitleFromMessage({ + message: `File: ${f.path}\n\n${truncatedDiff}`, + agentModel: model, + agentId: `commit-file-summary-${providerId}`, + agentName: "File Summarizer", + instructions: PHASE1_INSTRUCTIONS, + tracingContext: { + surface: "commit-file-summary", + provider: providerName, + }, + }), + }); + + return `${f.path}: ${result ?? 
"変更あり"}`; + }; + + const fileSummaries = await Promise.all( + summarizableFiles.map(summarizeFile), + ); + + // ---- Phase 2: Generate final commit message from summaries ------------ + + let phase2Input = "変更されたファイルの要約:\n"; + phase2Input += fileSummaries.join("\n"); + if (skippedFileNames.length > 0) { + phase2Input += `\n\nその他の変更ファイル(依存関係・バイナリ):\n${skippedFileNames.join("\n")}`; + } + phase2Input += `\n\n変更の統計:\n${stagedStat || unstagedStat || "(統計なし)"}`; + + const PHASE2_PROMPT = `以下のファイル変更要約に基づいて、簡潔なconventional commitメッセージを日本語で生成してください。\nフォーマット: type(scope): 日本語の説明\ntypeは feat, fix, refactor, chore, docs, test, style, perf のいずれか。\n72文字以内。コミットメッセージのみを返してください。\n\n${phase2Input}`; + const PHASE2_INSTRUCTIONS = + "日本語で簡潔なconventional commitメッセージを生成してください。コミットメッセージの行のみを返してください。"; + + const { result, attempts } = await callSmallModel({ + invoke: async ({ model, providerId, providerName }) => + generateTitleFromMessage({ + message: PHASE2_PROMPT, + agentModel: model, + agentId: `commit-message-${providerId}`, + agentName: "Commit Message Generator", + instructions: PHASE2_INSTRUCTIONS, + tracingContext: { + surface: "commit-message-generation", + provider: providerName, + }, + }), + }); + + if (!result) { + console.warn( + "[generateCommitMessage] All providers failed:", + JSON.stringify(attempts, null, 2), + ); + } + + return { message: result }; + }), + + forceUnlockIndex: publicProcedure + .input(z.object({ worktreePath: z.string() })) + .mutation( + async ({ + input, + }): Promise<{ removed: boolean; path: string | null }> => { + assertRegisteredWorktree(input.worktreePath); + const { isAbsolute, resolve } = await import("node:path"); + const { stat, unlink } = await import("node:fs/promises"); + + // Resolve the *real* git-dir. For linked worktrees ".git" is a + // file that points at ".git/worktrees/", where the actual + // index.lock lives. Falling back to "/.git" is fine for + // the non-linked case. 
+ const git = await getGitWithShellPath(input.worktreePath); + let gitDir: string; + try { + const raw = (await git.raw(["rev-parse", "--git-dir"])).trim(); + gitDir = isAbsolute(raw) ? raw : resolve(input.worktreePath, raw); + } catch { + gitDir = resolve(input.worktreePath, ".git"); + } + + const candidates = [ + resolve(gitDir, "index.lock"), + resolve(gitDir, "HEAD.lock"), + resolve(gitDir, "shallow.lock"), + ]; + // Walk every candidate so that index.lock and HEAD.lock + // co-existing (e.g. after a crash during a branch switch) can + // both be cleared in a single call. `path` in the response + // is the first lock removed so the UI has something concrete + // to show; `removed` is true if at least one file was deleted. + let firstRemoved: string | null = null; + for (const candidate of candidates) { + // stat: only swallow ENOENT (file not present). Other stat + // errors (EACCES, EPERM, EIO) are real failures and should + // surface so the user learns why the unlock did not run. + try { + await stat(candidate); + } catch (statError) { + const code = (statError as NodeJS.ErrnoException).code; + if (code === "ENOENT") continue; + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: `Failed to inspect lock file ${candidate}: ${ + statError instanceof Error + ? statError.message + : String(statError) + }`, + }); + } + // unlink failures (EACCES/EPERM when the file exists but can + // not be removed) are propagated verbatim — never swallowed. + try { + await unlink(candidate); + } catch (unlinkError) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: `Failed to remove lock file ${candidate}: ${ + unlinkError instanceof Error + ? 
unlinkError.message + : String(unlinkError) + }`, + }); + } + if (firstRemoved === null) firstRemoved = candidate; + } + if (firstRemoved !== null) { + clearStatusCacheForWorktree(input.worktreePath); + return { removed: true, path: firstRemoved }; + } + return { removed: false, path: null }; + }, + ), }); }; diff --git a/apps/desktop/src/lib/trpc/routers/changes/index.ts b/apps/desktop/src/lib/trpc/routers/changes/index.ts index e931f8f54af..eea4f1cfc1e 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/index.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/index.ts @@ -1,6 +1,7 @@ import { router } from "../.."; import { createBranchesRouter } from "./branches"; import { createFileContentsRouter } from "./file-contents"; +import { createGitBlameRouter } from "./git-blame"; import { createGitOperationsRouter } from "./git-operations"; import { createStagingRouter } from "./staging"; import { createStatusRouter } from "./status"; @@ -11,6 +12,7 @@ export const createChangesRouter = () => { const fileContentsRouter = createFileContentsRouter(); const stagingRouter = createStagingRouter(); const gitOperationsRouter = createGitOperationsRouter(); + const gitBlameRouter = createGitBlameRouter(); return router({ // Branch operations @@ -27,5 +29,8 @@ export const createChangesRouter = () => { // Git operations (commit, push, pull, sync, createPR) ...gitOperationsRouter._def.procedures, + + // Git blame + ...gitBlameRouter._def.procedures, }); }; diff --git a/apps/desktop/src/lib/trpc/routers/changes/security/git-commands.ts b/apps/desktop/src/lib/trpc/routers/changes/security/git-commands.ts index 230ea918154..3699bf221de 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/security/git-commands.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/security/git-commands.ts @@ -22,6 +22,45 @@ async function getGitWithShellPath(worktreePath: string) { return getSimpleGitWithShellPath(worktreePath); } +function normalizeBranchName(branch: string): string { + const 
trimmed = branch.trim(); + if (trimmed.startsWith("refs/heads/")) { + return trimmed.slice("refs/heads/".length); + } + if (trimmed.startsWith("refs/remotes/origin/")) { + return trimmed.slice("refs/remotes/origin/".length); + } + if (trimmed.startsWith("remotes/origin/")) { + return trimmed.slice("remotes/origin/".length); + } + if (trimmed.startsWith("origin/")) { + return trimmed.slice("origin/".length); + } + return trimmed; +} + +function assertValidBranchName(branch: string): void { + // Validate: reject anything that looks like a flag + if (branch.startsWith("-")) { + throw new Error("Invalid branch name: cannot start with -"); + } + + // Validate: reject empty branch names + if (!branch.trim()) { + throw new Error("Invalid branch name: cannot be empty"); + } +} + +function assertValidStartPoint(startPoint: string): void { + if (startPoint.startsWith("-")) { + throw new Error("Invalid start point: cannot start with -"); + } + + if (!startPoint.trim()) { + throw new Error("Invalid start point: cannot be empty"); + } +} + async function isCurrentBranch({ worktreePath, expectedBranch, @@ -50,25 +89,59 @@ export async function gitSwitchBranch( branch: string, ): Promise { assertRegisteredWorktree(worktreePath); - - // Validate: reject anything that looks like a flag - if (branch.startsWith("-")) { - throw new Error("Invalid branch name: cannot start with -"); - } - - // Validate: reject empty branch names - if (!branch.trim()) { - throw new Error("Invalid branch name: cannot be empty"); - } + const normalizedBranch = normalizeBranchName(branch); + assertValidBranchName(normalizedBranch); const git = await getGitWithShellPath(worktreePath); await runWithPostCheckoutHookTolerance({ - context: `Switched branch to "${branch}" in ${worktreePath}`, + context: `Switched branch to "${normalizedBranch}" in ${worktreePath}`, run: async () => { + const localBranches = await git.branchLocal(); + if (localBranches.all.includes(normalizedBranch)) { + try { + await 
git.raw(["switch", normalizedBranch]); + return; + } catch (switchError) { + const errorMessage = String(switchError); + if (errorMessage.includes("is not a git command")) { + await git.checkout(normalizedBranch); + return; + } + throw switchError; + } + } + + const remoteBranches = await git.branch(["-r"]); + const remoteBranch = `origin/${normalizedBranch}`; + if (remoteBranches.all.includes(remoteBranch)) { + try { + await git.raw([ + "switch", + "--track", + "-c", + normalizedBranch, + remoteBranch, + ]); + return; + } catch (switchError) { + const errorMessage = String(switchError); + if (errorMessage.includes("is not a git command")) { + await git.checkout([ + "-b", + normalizedBranch, + "--track", + remoteBranch, + ]); + return; + } + throw switchError; + } + } + try { // Prefer `git switch` - unambiguous branch operation (git 2.23+) - await git.raw(["switch", branch]); + await git.raw(["switch", normalizedBranch]); } catch (switchError) { // Check if it's because `switch` command doesn't exist (old git < 2.23) // Git outputs: "git: 'switch' is not a git command. See 'git --help'." @@ -76,12 +149,55 @@ export async function gitSwitchBranch( if (errorMessage.includes("is not a git command")) { // Fallback for older git versions // Note: checkout WITHOUT -- is correct for branches - await git.checkout(branch); + await git.checkout(normalizedBranch); } else { throw switchError; } } }, + didSucceed: async () => + isCurrentBranch({ worktreePath, expectedBranch: normalizedBranch }), + }); +} + +/** + * Create and switch to a new branch, optionally from a specific ref. + * + * Uses `git switch -c` (or `git checkout -b` as a fallback). 
+ */ +export async function gitCreateBranch( + worktreePath: string, + branch: string, + startPoint?: string, +): Promise { + assertRegisteredWorktree(worktreePath); + assertValidBranchName(branch); + if (startPoint) { + assertValidStartPoint(startPoint); + } + + const git = await getGitWithShellPath(worktreePath); + + await runWithPostCheckoutHookTolerance({ + context: `Created branch "${branch}" in ${worktreePath}`, + run: async () => { + try { + await git.raw( + startPoint + ? ["switch", "-c", branch, startPoint] + : ["switch", "-c", branch], + ); + } catch (switchError) { + const errorMessage = String(switchError); + if (errorMessage.includes("is not a git command")) { + await git.checkout( + startPoint ? ["-b", branch, startPoint] : ["-b", branch], + ); + return; + } + throw switchError; + } + }, didSucceed: async () => isCurrentBranch({ worktreePath, expectedBranch: branch }), }); @@ -178,6 +294,20 @@ export async function gitStageAll(worktreePath: string): Promise { await git.add("-A"); } +/** + * Stage all changes to tracked files only. + * + * Uses `git add -u` so modifications and deletions of tracked files + * are staged, but untracked files are left alone. Matches the + * VS Code `git.smartCommitChanges: "tracked"` behavior. + */ +export async function gitStageTracked(worktreePath: string): Promise { + assertRegisteredWorktree(worktreePath); + + const git = await getGitWithShellPath(worktreePath); + await git.add(["-u"]); +} + /** * Unstage a file (remove from staging area). 
* diff --git a/apps/desktop/src/lib/trpc/routers/changes/staging.ts b/apps/desktop/src/lib/trpc/routers/changes/staging.ts index cbf7598eb4f..b3d7bc28bed 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/staging.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/staging.ts @@ -10,6 +10,7 @@ import { gitStageAll, gitStageFile, gitStageFiles, + gitStageTracked, gitStash, gitStashIncludeUntracked, gitStashPop, @@ -127,6 +128,14 @@ export const createStagingRouter = () => { return { success: true }; }), + stageTracked: publicProcedure + .input(z.object({ worktreePath: z.string() })) + .mutation(async ({ input }): Promise<{ success: boolean }> => { + await gitStageTracked(input.worktreePath); + clearStatusCacheForWorktree(input.worktreePath); + return { success: true }; + }), + unstageAll: publicProcedure .input(z.object({ worktreePath: z.string() })) .mutation(async ({ input }): Promise<{ success: boolean }> => { diff --git a/apps/desktop/src/lib/trpc/routers/changes/status.ts b/apps/desktop/src/lib/trpc/routers/changes/status.ts index 89b570bd6e9..fd0dd54f61b 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/status.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/status.ts @@ -1,5 +1,9 @@ import { TRPCError } from "@trpc/server"; -import type { ChangedFile, GitChangesStatus } from "shared/changes-types"; +import type { + ChangedFile, + CommitGraphData, + GitChangesStatus, +} from "shared/changes-types"; import { z } from "zod"; import { publicProcedure, router } from "../.."; import { assertRegisteredWorktree } from "./security/path-validation"; @@ -49,7 +53,7 @@ export const createStatusRouter = () => { { dedupeKey: cacheKey, strategy: "coalesce", - timeoutMs: 45_000, + timeoutMs: 90_000, }, ); @@ -112,5 +116,39 @@ export const createStatusRouter = () => { throw error; } }), + getCommitGraph: publicProcedure + .input( + z.object({ + worktreePath: z.string(), + maxCount: z.number().int().min(1).max(5_000).optional(), + }), + ) + .query(async ({ input }): 
Promise => { + assertRegisteredWorktree(input.worktreePath); + const effectiveMaxCount = input.maxCount ?? 500; + + try { + return await runGitTask( + "getCommitGraph", + { + worktreePath: input.worktreePath, + maxCount: effectiveMaxCount, + }, + { + dedupeKey: `graph:${input.worktreePath}:${effectiveMaxCount}`, + strategy: "coalesce", + timeoutMs: 30_000, + }, + ); + } catch (error) { + if (error instanceof Error && error.name === "NotGitRepoError") { + throw new TRPCError({ + code: "BAD_REQUEST", + message: error.message, + }); + } + throw error; + } + }), }); }; diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/apply-numstat.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/apply-numstat.ts index 5c6b6c81334..12c679614ab 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/apply-numstat.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/apply-numstat.ts @@ -1,6 +1,9 @@ import type { ChangedFile } from "shared/changes-types"; import type { SimpleGit } from "simple-git"; import { parseDiffNumstat } from "./parse-status"; +import { withTimeout } from "./with-timeout"; + +const NUMSTAT_TIMEOUT_MS = 15_000; export async function applyNumstatToFiles( git: SimpleGit, @@ -10,7 +13,11 @@ export async function applyNumstatToFiles( if (files.length === 0) return; try { - const numstat = await git.raw(diffArgs); + const numstat = await withTimeout( + git.raw(diffArgs), + NUMSTAT_TIMEOUT_MS, + "diff numstat", + ); const stats = parseDiffNumstat(numstat); for (const file of files) { diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/existing-pr-push-target.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/existing-pr-push-target.ts index 4f79251f98a..924ec02d4f0 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/existing-pr-push-target.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/existing-pr-push-target.ts @@ -1,29 +1,18 @@ import type { GitHubStatus } from "@superset/local-db"; -import { normalizeGitHubRepoUrl } 
from "./pull-request-url"; +import { + type GitRemoteInfo, + type GitTrackingRefInfo, + getPullRequestHeadRepoUrl, + isOpenPullRequestState, + type PullRequestPushTargetInfo, + resolveRemoteNameForPullRequestHead, +} from "../../workspaces/utils/github/pr-attachment"; -type ExistingPullRequest = NonNullable; - -export interface GitRemoteInfo { - name: string; - fetchUrl?: string; - pushUrl?: string; -} - -export interface GitTrackingRefInfo { - remoteName: string; - branchName: string; -} +export type { GitRemoteInfo }; -export interface ExistingPullRequestPushTargetInfo { - remote: string; - targetBranch: string; -} - -export function isOpenPullRequestState( - state: ExistingPullRequest["state"], -): boolean { - return state === "open" || state === "draft"; -} +type ExistingPullRequest = NonNullable; +export type ExistingPullRequestPushTargetInfo = PullRequestPushTargetInfo; +export { isOpenPullRequestState }; export function getExistingPRHeadRepoUrl( pr: Pick< @@ -31,15 +20,7 @@ export function getExistingPRHeadRepoUrl( "headRepositoryOwner" | "headRepositoryName" | "isCrossRepository" >, ): string | null { - if ( - !pr.isCrossRepository || - !pr.headRepositoryOwner || - !pr.headRepositoryName - ) { - return null; - } - - return `https://github.com/${pr.headRepositoryOwner}/${pr.headRepositoryName}`; + return getPullRequestHeadRepoUrl(pr); } export function resolveRemoteNameForExistingPRHead({ @@ -54,36 +35,11 @@ export function resolveRemoteNameForExistingPRHead({ >; fallbackRemote: string; }): string | null { - if (!pr.isCrossRepository) { - return fallbackRemote; - } - - const headRepoUrl = getExistingPRHeadRepoUrl(pr); - if (!headRepoUrl) { - return null; - } - - const normalizedHeadRepoUrl = normalizeGitHubRepoUrl(headRepoUrl); - if (!normalizedHeadRepoUrl) { - return null; - } - - for (const remote of remotes) { - const fetchUrl = remote.fetchUrl - ? normalizeGitHubRepoUrl(remote.fetchUrl) - : null; - const pushUrl = remote.pushUrl - ? 
normalizeGitHubRepoUrl(remote.pushUrl) - : null; - if ( - fetchUrl === normalizedHeadRepoUrl || - pushUrl === normalizedHeadRepoUrl - ) { - return remote.name; - } - } - - return null; + return resolveRemoteNameForPullRequestHead({ + remotes, + pr, + fallbackRemote, + }); } export function shouldRetargetPushToExistingPRHead({ diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.test.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.test.ts index 68c4506db62..ef5e3688c31 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.test.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.test.ts @@ -201,7 +201,7 @@ describe("mergePullRequest", () => { expect(execWithShellEnvMock).toHaveBeenCalledWith( "gh", - ["pr", "merge", "42", "--rebase"], + ["pr", "merge", "42", "--rebase", "--repo", "superset-sh/superset"], { cwd: "/tmp/unborn-worktree" }, ); expect(getPRForBranchMock).toHaveBeenCalledWith( diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.ts index 0fd984db33c..c1b35c0db87 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/merge-pull-request.ts @@ -4,6 +4,7 @@ import { } from "../../workspaces/utils/git"; import { execGitWithShellPath } from "../../workspaces/utils/git-client"; import { + extractNwoFromUrl, getPRForBranch, getPullRequestRepoArgs, getRepoContext, @@ -77,12 +78,15 @@ export async function mergePullRequest({ throw new Error(PR_CLOSED_MESSAGE); } + const prRepoNameWithOwner = extractNwoFromUrl(pr.url); const args = [ "pr", "merge", String(pr.number), `--${strategy}`, - ...getPullRequestRepoArgs(repoContext), + ...(prRepoNameWithOwner + ? 
["--repo", prRepoNameWithOwner] + : getPullRequestRepoArgs(repoContext)), ]; try { diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.test.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.test.ts index e9a481ca4cc..0b36ef67af8 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.test.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.test.ts @@ -263,6 +263,9 @@ describe("detectLanguage", () => { test("detects TypeScript files", () => { expect(detectLanguage("file.ts")).toBe("typescript"); expect(detectLanguage("file.tsx")).toBe("typescript"); + expect(detectLanguage("file.mts")).toBe("typescript"); + expect(detectLanguage("file.d.mts")).toBe("typescript"); + expect(detectLanguage("file.cts")).toBe("typescript"); }); test("detects JavaScript files", () => { diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.ts index 598f6676252..b408718a715 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/parse-status.ts @@ -28,12 +28,28 @@ function toChangedFile( }; } +const CONFLICT_PAIRS = new Set([ + // Only states that produce conflict markers in the file content. + // Non-marker states (DD, AU, UD, UA, DU) have no markers and are + // handled as unstaged changes via git add/rm instead. 
+ "AA", // both added + "UU", // both modified +]); + +function isConflicted(index: string, working: string): boolean { + return CONFLICT_PAIRS.has(`${index}${working}`); +} + export function parseGitStatus( status: StatusResult, -): Pick { +): Pick< + GitChangesStatus, + "branch" | "staged" | "unstaged" | "untracked" | "conflicted" +> { const staged: ChangedFile[] = []; const unstaged: ChangedFile[] = []; const untracked: ChangedFile[] = []; + const conflicted: ChangedFile[] = []; for (const file of status.files) { const path = file.path; @@ -45,6 +61,16 @@ export function parseGitStatus( continue; } + if (isConflicted(index, working)) { + conflicted.push({ + path, + status: "modified", + additions: 0, + deletions: 0, + }); + continue; + } + if (index && index !== " " && index !== "?") { staged.push({ path, @@ -70,6 +96,7 @@ export function parseGitStatus( staged, unstaged, untracked, + conflicted, }; } diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/pull-request-discovery.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/pull-request-discovery.ts index 1e3d5ab00dd..22abb83c06b 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/utils/pull-request-discovery.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/pull-request-discovery.ts @@ -1,101 +1,32 @@ import { TRPCError } from "@trpc/server"; import type { SimpleGit } from "simple-git"; import { z } from "zod"; -import { execGitWithShellPath } from "../../workspaces/utils/git-client"; -import { getRepoContext } from "../../workspaces/utils/github"; -import { getPullRequestRepoArgs } from "../../workspaces/utils/github/repo-context"; +import { getBranchPullRequestBaseRepoConfig } from "../../workspaces/utils/base-branch-config"; +import { fetchGitHubPRStatus } from "../../workspaces/utils/github"; +import { + extractNwoFromUrl, + getRepoContext, + getTrackingRepoUrl, +} from "../../workspaces/utils/github/repo-context"; import { execWithShellEnv } from "../../workspaces/utils/shell-env"; 
import { buildPullRequestCompareUrl, normalizeGitHubRepoUrl, parseUpstreamRef, } from "./pull-request-url"; - -async function findOpenPRByHeadCommit( - worktreePath: string, -): Promise { - try { - const { stdout: headOutput } = await execGitWithShellPath( - ["rev-parse", "HEAD"], - { cwd: worktreePath }, - ); - const headSha = headOutput.trim(); - if (!headSha) { - return null; - } - - const repoArgs = getPullRequestRepoArgs(await getRepoContext(worktreePath)); - - const { stdout } = await execWithShellEnv( - "gh", - [ - "pr", - "list", - ...repoArgs, - "--state", - "open", - "--search", - `${headSha} is:pr`, - "--limit", - "20", - "--json", - "url,headRefOid", - ], - { cwd: worktreePath }, - ); - - const parsed = JSON.parse(stdout) as Array<{ - url?: string; - headRefOid?: string; - }>; - const match = parsed.find((candidate) => candidate.headRefOid === headSha); - return match?.url?.trim() || null; - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - console.warn( - "[git/findExistingOpenPRUrl] Failed commit-based PR lookup:", - message, - ); - return null; - } -} +import { clearWorktreeStatusCaches } from "./worktree-status-caches"; export async function findExistingOpenPRUrl( worktreePath: string, ): Promise { - // Prefer tracking-based lookup first for fork/branch-name mismatch scenarios. - try { - const { stdout } = await execWithShellEnv( - "gh", - [ - "pr", - "view", - "--json", - "url,state", - "--jq", - 'if .state == "OPEN" then .url else "" end', - ], - { cwd: worktreePath }, - ); - const url = stdout.trim(); - if (url) { - return url; - } - } catch (error) { - const message = error instanceof Error ? error.message : String(error); - const isNoPROpenError = message - .toLowerCase() - .includes("no pull requests found"); - if (!isNoPROpenError) { - console.warn( - "[git/findExistingOpenPRUrl] Failed tracking-branch PR lookup:", - message, - ); - } - // Fallback to commit-SHA search below. 
+ clearWorktreeStatusCaches(worktreePath); + const githubStatus = await fetchGitHubPRStatus(worktreePath); + const pullRequest = githubStatus?.pr; + if (pullRequest?.state !== "open" && pullRequest?.state !== "draft") { + return null; } - return findOpenPRByHeadCommit(worktreePath); + return pullRequest.url.trim() || null; } const ghRepoMetadataSchema = z.object({ @@ -127,10 +58,142 @@ async function getMergeBaseBranch( } } +export interface PullRequestBaseRepoOption { + label: string; + repoNameWithOwner: string; + repoUrl: string; + source: "current" | "tracking" | "upstream"; +} + +function getPullRequestBaseRepoLabel( + repoNameWithOwner: string, + source: PullRequestBaseRepoOption["source"], +): string { + switch (source) { + case "tracking": + return `${repoNameWithOwner} (tracking remote)`; + case "upstream": + return `${repoNameWithOwner} (upstream repository)`; + default: + return `${repoNameWithOwner} (current repository)`; + } +} + +export async function getPullRequestBaseRepoOptions( + worktreePath: string, +): Promise { + const [repoContext, trackingRepoUrl] = await Promise.all([ + getRepoContext(worktreePath), + getTrackingRepoUrl(worktreePath), + ]); + + if (!repoContext) { + return []; + } + + const candidates: Array<{ + repoUrl: string | null; + source: PullRequestBaseRepoOption["source"]; + }> = [ + { repoUrl: trackingRepoUrl, source: "tracking" }, + { repoUrl: repoContext.repoUrl, source: "current" }, + { + repoUrl: repoContext.isFork ? repoContext.upstreamUrl : null, + source: "upstream", + }, + ]; + + const options = new Map(); + for (const candidate of candidates) { + const normalizedRepoUrl = normalizeGitHubRepoUrl(candidate.repoUrl ?? 
""); + if (!normalizedRepoUrl || options.has(normalizedRepoUrl)) { + continue; + } + + const repoNameWithOwner = extractNwoFromUrl(normalizedRepoUrl); + if (!repoNameWithOwner) { + continue; + } + + options.set(normalizedRepoUrl, { + label: getPullRequestBaseRepoLabel(repoNameWithOwner, candidate.source), + repoNameWithOwner, + repoUrl: normalizedRepoUrl, + source: candidate.source, + }); + } + + return [...options.values()]; +} + +export async function resolvePullRequestBaseRepoSelection({ + worktreePath, + branch, + preferredBaseRepoUrl, +}: { + worktreePath: string; + branch: string; + preferredBaseRepoUrl?: string | null; +}): Promise<{ + baseRepoOptions: PullRequestBaseRepoOption[]; + selectedBaseRepoUrl: string | null; +}> { + const [baseRepoOptions, configuredBaseRepo] = await Promise.all([ + getPullRequestBaseRepoOptions(worktreePath), + getBranchPullRequestBaseRepoConfig({ + repoPath: worktreePath, + branch, + }), + ]); + + const normalizedPreferredBaseRepoUrl = preferredBaseRepoUrl + ? normalizeGitHubRepoUrl(preferredBaseRepoUrl) + : null; + if ( + normalizedPreferredBaseRepoUrl && + baseRepoOptions.some( + (option) => option.repoUrl === normalizedPreferredBaseRepoUrl, + ) + ) { + return { + baseRepoOptions, + selectedBaseRepoUrl: normalizedPreferredBaseRepoUrl, + }; + } + + const normalizedConfiguredBaseRepoUrl = configuredBaseRepo.baseRepoUrl + ? normalizeGitHubRepoUrl(configuredBaseRepo.baseRepoUrl) + : null; + if ( + normalizedConfiguredBaseRepoUrl && + baseRepoOptions.some( + (option) => option.repoUrl === normalizedConfiguredBaseRepoUrl, + ) + ) { + return { + baseRepoOptions, + selectedBaseRepoUrl: normalizedConfiguredBaseRepoUrl, + }; + } + + if (baseRepoOptions.length === 1) { + return { + baseRepoOptions, + selectedBaseRepoUrl: baseRepoOptions[0]?.repoUrl ?? 
null, + }; + } + + return { + baseRepoOptions, + selectedBaseRepoUrl: null, + }; +} + export async function buildNewPullRequestUrl( worktreePath: string, git: SimpleGit, branch: string, + preferredBaseRepoUrl?: string | null, ): Promise { const { stdout } = await execWithShellEnv( "gh", @@ -139,19 +202,32 @@ export async function buildNewPullRequestUrl( ); const repoMetadata = ghRepoMetadataSchema.parse(JSON.parse(stdout)); const currentRepoUrl = normalizeGitHubRepoUrl(repoMetadata.url); - const baseRepoUrl = normalizeGitHubRepoUrl( - repoMetadata.isFork && repoMetadata.parent?.url - ? repoMetadata.parent.url - : repoMetadata.url, - ); + const { baseRepoOptions, selectedBaseRepoUrl } = + await resolvePullRequestBaseRepoSelection({ + worktreePath, + branch, + preferredBaseRepoUrl, + }); + const baseRepoUrl = selectedBaseRepoUrl; - if (!currentRepoUrl || !baseRepoUrl) { + if (!currentRepoUrl) { throw new TRPCError({ code: "BAD_REQUEST", message: "GitHub is not available for this workspace.", }); } + if (!baseRepoUrl) { + throw new TRPCError({ + code: + baseRepoOptions.length === 0 ? "BAD_REQUEST" : "PRECONDITION_FAILED", + message: + baseRepoOptions.length === 0 + ? "No GitHub pull request base repository is available for this workspace." + : "Multiple base repositories are available. Choose a base repository before creating a pull request.", + }); + } + const configuredBaseBranch = await getMergeBaseBranch(git, branch); const baseBranch = configuredBaseBranch ?? repoMetadata.defaultBranchRef.name; let headRepoOwner = currentRepoUrl.split("/").at(-2) ?? ""; diff --git a/apps/desktop/src/lib/trpc/routers/changes/utils/with-timeout.ts b/apps/desktop/src/lib/trpc/routers/changes/utils/with-timeout.ts new file mode 100644 index 00000000000..6e3f498f227 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/changes/utils/with-timeout.ts @@ -0,0 +1,25 @@ +/** + * Race a promise against a timeout. Clears the timer on both resolve and reject + * to avoid leaked timers. 
+ */ +export function withTimeout( + promise: Promise, + ms: number, + label: string, +): Promise { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + reject(new Error(`git operation timed out after ${ms}ms: ${label}`)); + }, ms); + promise.then( + (value) => { + clearTimeout(timer); + resolve(value); + }, + (error) => { + clearTimeout(timer); + reject(error); + }, + ); + }); +} diff --git a/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-handlers.ts b/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-handlers.ts index 1b8ca618cd8..192788f830c 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-handlers.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-handlers.ts @@ -1,6 +1,11 @@ import { readFile, realpath, stat } from "node:fs/promises"; import { isAbsolute, relative, resolve, sep } from "node:path"; -import type { ChangedFile, GitChangesStatus } from "shared/changes-types"; +import type { + ChangedFile, + CommitGraphData, + CommitGraphNode, + GitChangesStatus, +} from "shared/changes-types"; import type { SimpleGit, StatusResult } from "simple-git"; import { getStatusNoLock } from "../../workspaces/utils/git"; import { getSimpleGitWithShellPath } from "../../workspaces/utils/git-client"; @@ -10,6 +15,7 @@ import { parseGitStatus, parseNameStatus, } from "../utils/parse-status"; +import { withTimeout } from "../utils/with-timeout"; import type { GitTaskPayloadMap, GitTaskResultMap, @@ -30,8 +36,15 @@ interface TrackingStatus { } const MAX_LINE_COUNT_SIZE = 1 * 1024 * 1024; +const MAX_UNTRACKED_LINE_COUNT_FILES = 200; const WORKER_DEBUG = process.env.SUPERSET_WORKER_DEBUG === "1"; +/** + * Per-operation timeout for individual git commands. + * Prevents a single slow operation from consuming the entire task budget. 
+ */ +const GIT_OP_TIMEOUT_MS = 15_000; + function logWorkerWarning(message: string, error: unknown): void { console.warn(`[changes-git-worker] ${message}`, error); } @@ -69,6 +82,8 @@ async function applyUntrackedLineCount( worktreePath: string, untracked: ChangedFile[], ): Promise { + if (untracked.length > MAX_UNTRACKED_LINE_COUNT_FILES) return; + let worktreeReal: string; try { worktreeReal = await realpath(worktreePath); @@ -119,30 +134,38 @@ async function getBranchComparison( let behind = 0; try { - const tracking = await git.raw([ - "rev-list", - "--left-right", - "--count", - `origin/${defaultBranch}...HEAD`, - ]); + const tracking = await withTimeout( + git.raw([ + "rev-list", + "--left-right", + "--count", + `origin/${defaultBranch}...HEAD`, + ]), + GIT_OP_TIMEOUT_MS, + "rev-list count", + ); const [behindStr, aheadStr] = tracking.trim().split(/\s+/); behind = Number.parseInt(behindStr || "0", 10); ahead = Number.parseInt(aheadStr || "0", 10); - const logOutput = await git.raw([ - "log", - `origin/${defaultBranch}..HEAD`, - "--max-count=500", - "--format=%H|%h|%s|%an|%aI", - ]); + const logOutput = await withTimeout( + git.raw([ + "log", + `origin/${defaultBranch}..HEAD`, + "--max-count=500", + "--format=%H|%h|%s|%an|%aI", + ]), + GIT_OP_TIMEOUT_MS, + "log commits", + ); commits = parseGitLog(logOutput); if (ahead > 0) { - const nameStatus = await git.raw([ - "diff", - "--name-status", - `origin/${defaultBranch}...HEAD`, - ]); + const nameStatus = await withTimeout( + git.raw(["diff", "--name-status", `origin/${defaultBranch}...HEAD`]), + GIT_OP_TIMEOUT_MS, + "diff name-status", + ); againstBase = parseNameStatus(nameStatus); await applyNumstatToFiles(git, againstBase, [ @@ -165,21 +188,20 @@ async function getTrackingBranchStatus( git: SimpleGit, ): Promise { try { - const upstream = await git.raw([ - "rev-parse", - "--abbrev-ref", - "@{upstream}", - ]); + const upstream = await withTimeout( + git.raw(["rev-parse", "--abbrev-ref", "@{upstream}"]), + 
GIT_OP_TIMEOUT_MS, + "rev-parse upstream", + ); if (!upstream.trim()) { return { pushCount: 0, pullCount: 0, hasUpstream: false }; } - const tracking = await git.raw([ - "rev-list", - "--left-right", - "--count", - "@{upstream}...HEAD", - ]); + const tracking = await withTimeout( + git.raw(["rev-list", "--left-right", "--count", "@{upstream}...HEAD"]), + GIT_OP_TIMEOUT_MS, + "rev-list tracking", + ); const [pullStr, pushStr] = tracking.trim().split(/\s+/); return { pushCount: Number.parseInt(pushStr || "0", 10), @@ -217,6 +239,7 @@ async function computeStatus({ staged: parsed.staged, unstaged: parsed.unstaged, untracked: parsed.untracked, + conflicted: parsed.conflicted, ahead: branchComparison.ahead, behind: branchComparison.behind, pushCount: trackingStatus.pushCount, @@ -251,6 +274,70 @@ async function computeCommitFiles({ return files; } +function parseGitGraphLog(logOutput: string): CommitGraphNode[] { + if (!logOutput.trim()) return []; + + const nodes: CommitGraphNode[] = []; + const parts = logOutput.split("\x00"); + + for (let index = 0; index + 10 < parts.length; index += 11) { + const [ + hash, + shortHash, + message, + fullMessageRaw, + author, + authorEmail, + committer, + committerEmail, + dateStr, + parentsStr, + refsStr, + ] = parts.slice(index, index + 11); + if (!hash || !shortHash) continue; + + const date = dateStr ? new Date(dateStr) : new Date(); + const parentHashes = parentsStr?.trim() ? parentsStr.trim().split(" ") : []; + const refs = refsStr?.trim() + ? refsStr.trim().split(", ").filter(Boolean) + : []; + + nodes.push({ + hash, + shortHash, + message: message ?? "", + fullMessage: fullMessageRaw?.trimEnd() || message || "", + author: author ?? "", + authorEmail: authorEmail ?? "", + committer: committer ?? "", + committerEmail: committerEmail ?? 
"", + date, + parentHashes, + refs, + }); + } + return nodes; +} + +async function computeCommitGraph({ + worktreePath, + maxCount = 500, +}: GitTaskPayloadMap["getCommitGraph"]): Promise { + const git = await getSimpleGitWithShellPath(worktreePath); + const logOutput = await git.raw([ + "log", + "--all", + "--topo-order", + "--date-order", + "--decorate=short", + `--max-count=${maxCount}`, + "-z", + "--format=%H%x00%h%x00%s%x00%B%x00%an%x00%ae%x00%cn%x00%ce%x00%aI%x00%P%x00%D", + ]); + const nodes = parseGitGraphLog(logOutput); + return { nodes }; +} + export async function executeGitTask( taskType: TTask, payload: GitTaskPayloadMap[TTask], @@ -264,6 +351,10 @@ export async function executeGitTask( return computeCommitFiles( payload as GitTaskPayloadMap["getCommitFiles"], ) as Promise; + case "getCommitGraph": + return computeCommitGraph( + payload as GitTaskPayloadMap["getCommitGraph"], + ) as Promise; default: { const exhaustive: never = taskType; throw new Error(`Unknown git task: ${exhaustive}`); diff --git a/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-types.ts b/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-types.ts index 2a9bbff5da6..c07017017aa 100644 --- a/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-types.ts +++ b/apps/desktop/src/lib/trpc/routers/changes/workers/git-task-types.ts @@ -1,4 +1,8 @@ -import type { ChangedFile, GitChangesStatus } from "shared/changes-types"; +import type { + ChangedFile, + CommitGraphData, + GitChangesStatus, +} from "shared/changes-types"; export interface GitTaskPayloadMap { getStatus: { @@ -9,11 +13,16 @@ export interface GitTaskPayloadMap { worktreePath: string; commitHash: string; }; + getCommitGraph: { + worktreePath: string; + maxCount?: number; + }; } export interface GitTaskResultMap { getStatus: GitChangesStatus; getCommitFiles: ChangedFile[]; + getCommitGraph: CommitGraphData; } export type GitTaskType = keyof GitTaskPayloadMap; diff --git 
a/apps/desktop/src/lib/trpc/routers/databases/index.ts b/apps/desktop/src/lib/trpc/routers/databases/index.ts new file mode 100644 index 00000000000..7b62d6b3253 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/databases/index.ts @@ -0,0 +1,1043 @@ +import type { Stats } from "node:fs"; +import { stat } from "node:fs/promises"; +import path from "node:path"; +import { TRPCError } from "@trpc/server"; +import Database from "better-sqlite3"; +import fg from "fast-glob"; +import { Client } from "pg"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { + deleteManualPostgresConnectionString, + discoverWorkspaceConfiguredDatabases, + getManualPostgresConnectionString, + postgresConnectionSourceSchema, + resolvePostgresConnectionStringFromSource, + saveManualPostgresConnectionString, + saveWorkspaceDatabaseCredentials, + updateWorkspaceDatabaseDefinition, +} from "./workspace-config"; + +const SQLITE_FILE_GLOBS = [ + "**/*.db", + "**/*.sqlite", + "**/*.sqlite3", + "**/*.db3", + "**/*.duckdb", +]; + +const SQLITE_ROW_ID_COLUMN = "__superset_rowid"; +const SQLITE_PRIMARY_KEY_COLUMN = "__superset_primary_key"; +const POSTGRES_ROW_ID_COLUMN = "__superset_ctid"; +const PREVIEW_TEXT_LIMIT = 180; + +function isAbsoluteFilesystemPath(inputPath: string): boolean { + return path.isAbsolute(inputPath) || /^[A-Za-z]:[\\/]/.test(inputPath); +} + +function ensureAbsoluteFilesystemPath(inputPath: string): void { + if (!isAbsoluteFilesystemPath(inputPath)) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Database path must be absolute.", + }); + } +} + +async function ensureExistingFile(inputPath: string): Promise { + let metadata: Stats; + try { + metadata = await stat(inputPath); + } catch { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Database file not found: ${inputPath}`, + }); + } + + if (!metadata.isFile()) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Path is not a file: ${inputPath}`, + }); + } +} + 
+async function ensureExistingDirectory(inputPath: string): Promise { + let metadata: Stats; + try { + metadata = await stat(inputPath); + } catch { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace path not found: ${inputPath}`, + }); + } + + if (!metadata.isDirectory()) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Path is not a directory: ${inputPath}`, + }); + } +} + +function quoteSqliteIdentifier(identifier: string): string { + return `"${identifier.replaceAll('"', '""')}"`; +} + +function quotePostgresIdentifier(identifier: string): string { + return `"${identifier.replaceAll('"', '""')}"`; +} + +function quoteSqlStringLiteral(value: string): string { + return `'${value.replaceAll("'", "''")}'`; +} + +function buildSqlitePreviewExpression( + columnName: string, + declaredType: string | null | undefined, +): string { + const quotedColumn = quoteSqliteIdentifier(columnName); + const normalizedType = (declaredType ?? "").toLowerCase(); + + if (normalizedType.includes("blob")) { + return `CASE WHEN ${quotedColumn} IS NULL THEN NULL ELSE '' END AS ${quoteSqliteIdentifier(columnName)}`; + } + + if ( + normalizedType.includes("text") || + normalizedType.includes("char") || + normalizedType.includes("clob") || + normalizedType.includes("json") || + normalizedType.length === 0 + ) { + return `CASE + WHEN ${quotedColumn} IS NULL THEN NULL + WHEN typeof(${quotedColumn}) = 'text' AND length(CAST(${quotedColumn} AS TEXT)) > ${PREVIEW_TEXT_LIMIT} + THEN substr(CAST(${quotedColumn} AS TEXT), 1, ${PREVIEW_TEXT_LIMIT}) || '…' + ELSE ${quotedColumn} + END AS ${quoteSqliteIdentifier(columnName)}`; + } + + return `${quotedColumn} AS ${quoteSqliteIdentifier(columnName)}`; +} + +function buildPostgresPreviewExpression(input: { + columnName: string; + dataType: string; + udtName: string; +}): string { + const quotedColumn = quotePostgresIdentifier(input.columnName); + const outputAlias = quotePostgresIdentifier(input.columnName); + const 
normalizedType = input.dataType.toLowerCase(); + const normalizedUdtName = input.udtName.toLowerCase(); + + if (normalizedType === "bytea") { + return `CASE WHEN ${quotedColumn} IS NULL THEN NULL ELSE '' END AS ${outputAlias}`; + } + + if (normalizedType === "json" || normalizedType === "jsonb") { + return `CASE + WHEN ${quotedColumn} IS NULL THEN NULL + ELSE '<${normalizedType}> ' || left(${quotedColumn}::text, ${PREVIEW_TEXT_LIMIT}) || + CASE WHEN length(${quotedColumn}::text) > ${PREVIEW_TEXT_LIMIT} THEN '…' ELSE '' END + END AS ${outputAlias}`; + } + + if (normalizedType === "array") { + return `CASE + WHEN ${quotedColumn} IS NULL THEN NULL + ELSE 'Array(' || coalesce(cardinality(${quotedColumn}), 0)::text || ') ' || + left(${quotedColumn}::text, ${PREVIEW_TEXT_LIMIT}) || + CASE WHEN length(${quotedColumn}::text) > ${PREVIEW_TEXT_LIMIT} THEN '…' ELSE '' END + END AS ${outputAlias}`; + } + + if ( + normalizedType === "text" || + normalizedType === "character varying" || + normalizedType === "character" || + normalizedType === "xml" || + normalizedType === "citext" || + normalizedType === "tsvector" || + normalizedType === "tsquery" || + normalizedUdtName === "vector" || + normalizedUdtName === "halfvec" || + normalizedUdtName === "sparsevec" || + normalizedUdtName === "geometry" || + normalizedUdtName === "geography" || + normalizedUdtName === "hstore" + ) { + return `CASE + WHEN ${quotedColumn} IS NULL THEN NULL + WHEN length(${quotedColumn}::text) > ${PREVIEW_TEXT_LIMIT} + THEN left(${quotedColumn}::text, ${PREVIEW_TEXT_LIMIT}) || '…' + ELSE ${quotedColumn}::text + END AS ${outputAlias}`; + } + + return `${quotedColumn} AS ${outputAlias}`; +} + +function getSqliteTableMetadata( + db: Database.Database, + tableName: string, +): { + columns: Array<{ + cid: number; + name: string; + type: string | null; + notnull: 0 | 1; + dflt_value: string | null; + pk: number; + }>; + primaryKeyColumns: Array<{ + cid: number; + name: string; + type: string | null; + notnull: 0 
| 1; + dflt_value: string | null; + pk: number; + }>; + hasRowId: boolean; +} { + const columns = db + .prepare(`PRAGMA table_info(${quoteSqliteIdentifier(tableName)})`) + .all() as Array<{ + cid: number; + name: string; + type: string | null; + notnull: 0 | 1; + dflt_value: string | null; + pk: number; + }>; + const primaryKeyColumns = columns + .filter((column) => column.pk > 0) + .sort((left, right) => left.pk - right.pk); + const tableDefinition = db + .prepare( + "SELECT sql FROM sqlite_master WHERE type = 'table' AND name = ? LIMIT 1", + ) + .get(tableName) as { sql?: string | null } | undefined; + + return { + columns, + primaryKeyColumns, + hasRowId: !/without\s+rowid/i.test(tableDefinition?.sql ?? ""), + }; +} + +function buildSqlitePrimaryKeyPreviewExpression( + primaryKeyColumns: Array<{ name: string }>, +): string { + if (primaryKeyColumns.length === 0) { + return `NULL AS ${quoteSqliteIdentifier(SQLITE_PRIMARY_KEY_COLUMN)}`; + } + + const jsonEntries = primaryKeyColumns.flatMap((column) => [ + quoteSqlStringLiteral(column.name), + quoteSqliteIdentifier(column.name), + ]); + + return `json_object(${jsonEntries.join(", ")}) AS ${quoteSqliteIdentifier(SQLITE_PRIMARY_KEY_COLUMN)}`; +} + +function openSqliteDatabase(databasePath: string): Database.Database { + try { + return new Database(databasePath, { + fileMustExist: true, + }); + } catch (error) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error + ? error.message + : "Failed to open SQLite database.", + }); + } +} + +async function withPostgresClient( + connectionString: string, + callback: (client: Client) => Promise, +): Promise { + const client = new Client({ connectionString }); + + try { + await client.connect(); + return await callback(client); + } catch (error) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error + ? 
error.message + : "Failed to connect to PostgreSQL.", + }); + } finally { + await client.end().catch(() => undefined); + } +} + +function stripTrailingSemicolon(sql: string): string { + return sql.replace(/;\s*$/, ""); +} + +function canApplyPostgresReadLimit(sql: string): boolean { + return /^(select|with|values|table)\b/i.test(sql.trim()); +} + +export const createDatabasesRouter = () => { + return router({ + discoverSqliteFiles: publicProcedure + .input( + z.object({ + worktreePath: z.string().min(1), + limit: z.number().int().positive().max(200).optional(), + }), + ) + .query(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.worktreePath); + await ensureExistingDirectory(input.worktreePath); + + const limit = input.limit ?? 50; + const files = await fg(SQLITE_FILE_GLOBS, { + absolute: true, + cwd: input.worktreePath, + onlyFiles: true, + unique: true, + suppressErrors: true, + ignore: [ + "**/.git/**", + "**/.next/**", + "**/.turbo/**", + "**/dist/**", + "**/node_modules/**", + ], + }); + + return { + files: files + .sort((left, right) => left.localeCompare(right)) + .slice(0, limit) + .map((absolutePath) => ({ + absolutePath, + relativePath: path.relative(input.worktreePath, absolutePath), + })), + }; + }), + + discoverWorkspaceDatabases: publicProcedure + .input( + z.object({ + worktreePath: z.string().min(1), + limit: z.number().int().positive().max(200).optional(), + }), + ) + .query(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.worktreePath); + await ensureExistingDirectory(input.worktreePath); + + const limit = input.limit ?? 
50; + const files = await fg(SQLITE_FILE_GLOBS, { + absolute: true, + cwd: input.worktreePath, + onlyFiles: true, + unique: true, + suppressErrors: true, + ignore: [ + "**/.git/**", + "**/.next/**", + "**/.turbo/**", + "**/dist/**", + "**/node_modules/**", + ], + }); + + const configuredDatabases = await discoverWorkspaceConfiguredDatabases( + input.worktreePath, + ); + const configuredSqlitePaths = new Set( + configuredDatabases + .filter((item) => item.dialect === "sqlite") + .map((item) => item.absolutePath), + ); + + const fileItems = files + .filter((absolutePath) => !configuredSqlitePaths.has(absolutePath)) + .map((absolutePath) => ({ + source: "file" as const, + dialect: "sqlite" as const, + absolutePath, + relativePath: path.relative(input.worktreePath, absolutePath), + })); + + const items = [ + ...fileItems.slice( + 0, + Math.max(0, limit - configuredDatabases.length), + ), + ...configuredDatabases, + ].sort((left, right) => + left.relativePath.localeCompare(right.relativePath), + ); + + return { items }; + }), + + saveWorkspaceDatabaseCredentials: publicProcedure + .input( + z.object({ + worktreePath: z.string().min(1), + definitionId: z.string().min(1), + username: z.string().min(1), + password: z.string(), + }), + ) + .mutation(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.worktreePath); + await ensureExistingDirectory(input.worktreePath); + await saveWorkspaceDatabaseCredentials({ + workspacePath: input.worktreePath, + definitionId: input.definitionId, + username: input.username, + password: input.password, + }); + return { ok: true }; + }), + + updateWorkspaceDatabaseDefinition: publicProcedure + .input( + z.object({ + worktreePath: z.string().min(1), + definitionId: z.string().min(1), + definition: z.discriminatedUnion("dialect", [ + z.object({ + dialect: z.literal("sqlite"), + label: z.string().min(1), + group: z.string().trim().min(1).optional(), + databasePath: z.string().min(1), + }), + z.object({ + dialect: z.literal("postgres"), 
+ label: z.string().min(1), + group: z.string().trim().min(1).optional(), + host: z.string().min(1), + port: z.number().int().positive().max(65535), + database: z.string().optional(), + ssl: z.boolean(), + username: z.string().min(1).optional(), + }), + ]), + }), + ) + .mutation(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.worktreePath); + await ensureExistingDirectory(input.worktreePath); + const definition = await updateWorkspaceDatabaseDefinition({ + workspacePath: input.worktreePath, + definitionId: input.definitionId, + definition: input.definition, + }); + return { definition }; + }), + + saveManualPostgresConnectionString: publicProcedure + .input( + z.object({ + connectionId: z.string().min(1), + connectionString: z.string().min(1), + }), + ) + .mutation(async ({ input }) => { + await saveManualPostgresConnectionString( + input.connectionId, + input.connectionString, + ); + return { ok: true }; + }), + + getManualPostgresConnectionString: publicProcedure + .input( + z.object({ + connectionId: z.string().min(1), + }), + ) + .query(async ({ input }) => { + const connectionString = await getManualPostgresConnectionString( + input.connectionId, + ); + if (connectionString === null) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Manual Postgres connection string not found.", + }); + } + return { connectionString }; + }), + + deleteManualPostgresConnectionString: publicProcedure + .input( + z.object({ + connectionId: z.string().min(1), + }), + ) + .mutation(async ({ input }) => { + await deleteManualPostgresConnectionString(input.connectionId); + return { ok: true }; + }), + + inspectSqlite: publicProcedure + .input( + z.object({ + databasePath: z.string().min(1), + }), + ) + .query(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.databasePath); + await ensureExistingFile(input.databasePath); + + const db = openSqliteDatabase(input.databasePath); + + try { + const tables = db + .prepare( + ` + SELECT name, type + FROM 
sqlite_master + WHERE type IN ('table', 'view') + AND name NOT LIKE 'sqlite_%' + ORDER BY type, name + `, + ) + .all() as Array<{ + name: string; + type: "table" | "view"; + }>; + + return { + tables: tables.map((table) => ({ + schema: null, + name: table.name, + type: table.type, + columns: db + .prepare( + `PRAGMA table_info(${quoteSqliteIdentifier(table.name)})`, + ) + .all() as Array<{ + cid: number; + name: string; + type: string; + notnull: 0 | 1; + dflt_value: string | null; + pk: 0 | 1; + }>, + })), + }; + } finally { + db.close(); + } + }), + + inspectPostgres: publicProcedure + .input( + z.object({ + connection: postgresConnectionSourceSchema, + }), + ) + .query(async ({ input }) => { + const connectionString = + await resolvePostgresConnectionStringFromSource({ + source: input.connection, + }); + return await withPostgresClient(connectionString, async (client) => { + const result = await client.query<{ + table_schema: string; + table_name: string; + table_type: string; + column_name: string; + data_type: string; + is_nullable: "YES" | "NO"; + ordinal_position: number; + }>(` + SELECT + t.table_schema, + t.table_name, + t.table_type, + c.column_name, + c.data_type, + c.is_nullable, + c.ordinal_position + FROM information_schema.tables t + JOIN information_schema.columns c + ON t.table_schema = c.table_schema + AND t.table_name = c.table_name + WHERE t.table_schema NOT IN ('pg_catalog', 'information_schema') + ORDER BY t.table_schema, t.table_name, c.ordinal_position + `); + + const tables = new Map< + string, + { + schema: string; + name: string; + type: string; + columns: { + cid: number; + name: string; + type: string; + notnull: 0 | 1; + dflt_value: string | null; + pk: 0 | 1; + }[]; + } + >(); + + for (const row of result.rows) { + const key = `${row.table_schema}.${row.table_name}`; + const current: + | { + schema: string; + name: string; + type: string; + columns: { + cid: number; + name: string; + type: string; + notnull: 0 | 1; + dflt_value: 
string | null; + pk: 0 | 1; + }[]; + } + | undefined = tables.get(key); + const nextTable = current ?? { + schema: row.table_schema, + name: row.table_name, + type: row.table_type.toLowerCase(), + columns: [] as { + cid: number; + name: string; + type: string; + notnull: 0 | 1; + dflt_value: string | null; + pk: 0 | 1; + }[], + }; + nextTable.columns.push({ + cid: row.ordinal_position, + name: row.column_name, + type: row.data_type, + notnull: row.is_nullable === "NO" ? 1 : 0, + dflt_value: null, + pk: 0, + }); + tables.set(key, nextTable); + } + + return { + tables: Array.from(tables.values()), + }; + }); + }), + + previewSqliteTable: publicProcedure + .input( + z.object({ + databasePath: z.string().min(1), + tableName: z.string().min(1), + limit: z.number().int().positive().max(200).optional(), + offset: z.number().int().min(0).optional(), + }), + ) + .query(async ({ input }) => { + try { + ensureAbsoluteFilesystemPath(input.databasePath); + await ensureExistingFile(input.databasePath); + + const db = openSqliteDatabase(input.databasePath); + const limit = input.limit ?? 50; + const offset = input.offset ?? 0; + const startedAt = performance.now(); + try { + const metadata = getSqliteTableMetadata(db, input.tableName); + const previewSelect = metadata.columns + .map((column) => + buildSqlitePreviewExpression(column.name, column.type), + ) + .join(", "); + const selectColumns = [ + metadata.hasRowId + ? `rowid AS ${quoteSqliteIdentifier(SQLITE_ROW_ID_COLUMN)}` + : null, + buildSqlitePrimaryKeyPreviewExpression( + metadata.primaryKeyColumns, + ), + previewSelect, + ].filter(Boolean); + const statement = db.prepare( + `SELECT ${selectColumns.join(", ")} FROM ${quoteSqliteIdentifier(input.tableName)} LIMIT ? OFFSET ?`, + ); + const previewRows = statement.all(limit + 1, offset) as Array< + Record + >; + const hasMore = previewRows.length > limit; + const rows = hasMore ? 
previewRows.slice(0, limit) : previewRows; + + return { + columns: statement + .columns() + .map((column) => column.name) + .filter( + (column) => + column !== SQLITE_ROW_ID_COLUMN && + column !== SQLITE_PRIMARY_KEY_COLUMN, + ), + rows, + rowCount: rows.length, + totalRows: null, + hasMore, + offset, + limit, + elapsedMs: Math.round(performance.now() - startedAt), + }; + } finally { + db.close(); + } + } catch (error) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error + ? error.message + : "Failed to preview SQLite table.", + }); + } + }), + + getSqliteRowDetail: publicProcedure + .input( + z.object({ + databasePath: z.string().min(1), + tableName: z.string().min(1), + rowId: z.union([z.string(), z.number()]).optional(), + primaryKey: z.string().optional(), + }), + ) + .query(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.databasePath); + await ensureExistingFile(input.databasePath); + + const db = openSqliteDatabase(input.databasePath); + try { + const metadata = getSqliteTableMetadata(db, input.tableName); + let whereClause = ""; + const parameters: Array = []; + + if (metadata.primaryKeyColumns.length > 0) { + if (!input.primaryKey) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + "Primary key payload is required for this SQLite table.", + }); + } + + let parsedPrimaryKey: Record; + try { + parsedPrimaryKey = JSON.parse(input.primaryKey) as Record< + string, + unknown + >; + } catch { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Invalid SQLite primary key payload.", + }); + } + whereClause = metadata.primaryKeyColumns + .map((column) => { + const value = parsedPrimaryKey[column.name]; + if (value === null) { + return `${quoteSqliteIdentifier(column.name)} IS NULL`; + } + parameters.push((value ?? 
null) as string | number | null); + return `${quoteSqliteIdentifier(column.name)} = ?`; + }) + .join(" AND "); + } else if (metadata.hasRowId) { + if (input.rowId === undefined) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "rowid is required for this SQLite table.", + }); + } + whereClause = "rowid = ?"; + parameters.push(input.rowId); + } else { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + "This SQLite table has neither a rowid nor a primary key.", + }); + } + + const row = db + .prepare( + `SELECT * FROM ${quoteSqliteIdentifier(input.tableName)} WHERE ${whereClause} LIMIT 1`, + ) + .get(...parameters) as Record | undefined; + + if (!row) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Row not found.", + }); + } + + return { row }; + } finally { + db.close(); + } + }), + + previewPostgresTable: publicProcedure + .input( + z.object({ + connection: postgresConnectionSourceSchema, + schema: z.string().min(1), + tableName: z.string().min(1), + limit: z.number().int().positive().max(200).optional(), + offset: z.number().int().min(0).optional(), + }), + ) + .query(async ({ input }) => { + const limit = input.limit ?? 50; + const offset = input.offset ?? 
0; + const startedAt = performance.now(); + const connectionString = + await resolvePostgresConnectionStringFromSource({ + source: input.connection, + }); + + return await withPostgresClient(connectionString, async (client) => { + const columnInfo = await client.query<{ + column_name: string; + data_type: string; + udt_name: string; + ordinal_position: number; + }>( + ` + SELECT column_name, data_type, udt_name, ordinal_position + FROM information_schema.columns + WHERE table_schema = $1 AND table_name = $2 + ORDER BY ordinal_position + `, + [input.schema, input.tableName], + ); + const qualifiedTableName = `${quotePostgresIdentifier(input.schema)}.${quotePostgresIdentifier(input.tableName)}`; + const previewSelect = columnInfo.rows + .map((column) => + buildPostgresPreviewExpression({ + columnName: column.column_name, + dataType: column.data_type, + udtName: column.udt_name, + }), + ) + .join(", "); + const dataResult = await client.query( + `SELECT ctid::text AS ${quotePostgresIdentifier(POSTGRES_ROW_ID_COLUMN)}, ${previewSelect} FROM ${qualifiedTableName} LIMIT $1 OFFSET $2`, + [limit + 1, offset], + ); + const hasMore = dataResult.rows.length > limit; + const rows = hasMore + ? 
dataResult.rows.slice(0, limit) + : dataResult.rows; + + return { + columns: dataResult.fields + .map((field: { name: string }) => field.name) + .filter((column) => column !== POSTGRES_ROW_ID_COLUMN), + rows, + rowCount: rows.length, + totalRows: null, + hasMore, + offset, + limit, + elapsedMs: Math.round(performance.now() - startedAt), + }; + }); + }), + + getPostgresRowDetail: publicProcedure + .input( + z.object({ + connection: postgresConnectionSourceSchema, + schema: z.string().min(1), + tableName: z.string().min(1), + ctid: z.string().min(1), + }), + ) + .query(async ({ input }) => { + const connectionString = + await resolvePostgresConnectionStringFromSource({ + source: input.connection, + }); + return await withPostgresClient(connectionString, async (client) => { + const qualifiedTableName = `${quotePostgresIdentifier(input.schema)}.${quotePostgresIdentifier(input.tableName)}`; + const result = await client.query( + `SELECT ctid::text AS ${quotePostgresIdentifier(POSTGRES_ROW_ID_COLUMN)}, * FROM ${qualifiedTableName} WHERE ctid = $1::tid LIMIT 1`, + [input.ctid], + ); + + const row = result.rows[0] as Record | undefined; + if (!row) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Row not found.", + }); + } + + return { row }; + }); + }), + + executeSqlite: publicProcedure + .input( + z.object({ + databasePath: z.string().min(1), + sql: z.string().min(1), + limit: z.number().int().positive().max(1000).optional(), + }), + ) + .mutation(async ({ input }) => { + ensureAbsoluteFilesystemPath(input.databasePath); + await ensureExistingFile(input.databasePath); + + const sql = input.sql.trim(); + if (!sql) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "SQL is required.", + }); + } + + const db = openSqliteDatabase(input.databasePath); + const startedAt = performance.now(); + + try { + const statement = db.prepare(sql); + const limit = input.limit ?? 
200; + + if (!statement.reader) { + const result = statement.run(); + return { + columns: [] as string[], + rows: [] as Array>, + rowCount: result.changes, + truncated: false, + elapsedMs: Math.round(performance.now() - startedAt), + command: "write", + lastInsertRowid: + typeof result.lastInsertRowid === "bigint" + ? result.lastInsertRowid.toString() + : result.lastInsertRowid, + }; + } + + const rows: Array> = []; + let truncated = false; + for (const row of statement.iterate() as Iterable< + Record + >) { + if (rows.length >= limit) { + truncated = true; + break; + } + rows.push(row); + } + + return { + columns: statement.columns().map((column) => column.name), + rows, + rowCount: rows.length, + truncated, + elapsedMs: Math.round(performance.now() - startedAt), + command: "read", + }; + } catch (error) { + if (error instanceof TRPCError) { + throw error; + } + + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error ? error.message : "Failed to execute SQL.", + }); + } finally { + db.close(); + } + }), + + executePostgres: publicProcedure + .input( + z.object({ + connection: postgresConnectionSourceSchema, + sql: z.string().min(1), + limit: z.number().int().positive().max(1000).optional(), + }), + ) + .mutation(async ({ input }) => { + const sql = input.sql.trim(); + if (!sql) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "SQL is required.", + }); + } + + const startedAt = performance.now(); + const connectionString = + await resolvePostgresConnectionStringFromSource({ + source: input.connection, + }); + return await withPostgresClient(connectionString, async (client) => { + const limit = input.limit ?? 200; + if (canApplyPostgresReadLimit(sql)) { + const limitedSql = `SELECT * FROM (${stripTrailingSemicolon( + sql, + )}) AS __superset_query LIMIT ${limit + 1}`; + const limitedResult = await client.query(limitedSql); + const truncated = limitedResult.rows.length > limit; + const rows = truncated + ? 
limitedResult.rows.slice(0, limit) + : limitedResult.rows; + + return { + columns: limitedResult.fields.map( + (field: { name: string }) => field.name, + ), + rows, + rowCount: rows.length, + truncated, + elapsedMs: Math.round(performance.now() - startedAt), + command: "SELECT", + }; + } + + const result = await client.query(sql); + + return { + columns: result.fields.map((field: { name: string }) => field.name), + rows: result.rows.slice(0, limit), + rowCount: result.rowCount ?? result.rows.length, + truncated: result.rows.length > limit, + elapsedMs: Math.round(performance.now() - startedAt), + command: result.command, + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/databases/workspace-config.ts b/apps/desktop/src/lib/trpc/routers/databases/workspace-config.ts new file mode 100644 index 00000000000..888a31a9797 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/databases/workspace-config.ts @@ -0,0 +1,604 @@ +import { chmod, mkdir, readFile, writeFile } from "node:fs/promises"; +import path from "node:path"; +import { TRPCError } from "@trpc/server"; + +// Simple per-file async mutex to prevent concurrent read/modify/write races. 
+function createFileMutex() { + let queue = Promise.resolve(); + return function withLock(fn: () => Promise): Promise { + const next = queue.then(fn, fn); + queue = next.then( + () => undefined, + () => undefined, + ); + return next; + }; +} +const withCredentialStoreLock = createFileMutex(); +const withManualConnectionStoreLock = createFileMutex(); + +import { + SUPERSET_HOME_DIR, + SUPERSET_SENSITIVE_FILE_MODE, +} from "main/lib/app-environment"; +import { z } from "zod"; +import { decrypt, encrypt } from "../auth/utils/crypto-storage"; + +const WORKSPACE_DATABASES_CONFIG_FILE = path.join( + ".superset", + "databases.json", +); +const WORKSPACE_DATABASE_CREDENTIALS_FILE = path.join( + SUPERSET_HOME_DIR, + "workspace-database-credentials.enc", +); + +const workspaceDatabaseBaseSchema = z.object({ + id: z.string().min(1), + label: z.string().min(1), + group: z.string().trim().min(1).optional(), +}); + +const sqliteWorkspaceDatabaseSchema = workspaceDatabaseBaseSchema.extend({ + dialect: z.literal("sqlite"), + path: z.string().min(1), +}); + +const postgresWorkspaceDatabaseSchema = workspaceDatabaseBaseSchema.extend({ + dialect: z.literal("postgres"), + host: z.string().min(1), + port: z.number().int().positive().max(65535).optional(), + database: z.preprocess( + (value) => + typeof value === "string" && value.trim().length === 0 + ? 
undefined + : value, + z.string().min(1).default("postgres"), + ), + ssl: z.boolean().optional(), + username: z.string().min(1).optional(), +}); + +export const workspaceDatabaseDefinitionSchema = z.discriminatedUnion( + "dialect", + [sqliteWorkspaceDatabaseSchema, postgresWorkspaceDatabaseSchema], +); + +const workspaceDatabaseConfigSchema = z.object({ + databases: z.array(workspaceDatabaseDefinitionSchema).default([]), +}); + +export const postgresConnectionSourceSchema = z.discriminatedUnion("kind", [ + z.object({ + kind: z.literal("connectionString"), + connectionStringId: z.string().min(1), + }), + z.object({ + kind: z.literal("workspaceConfig"), + workspacePath: z.string().min(1), + definitionId: z.string().min(1), + }), +]); + +const workspaceDatabaseCredentialEntrySchema = z.object({ + username: z.string().min(1), + password: z.string(), + updatedAt: z.number().int().nonnegative(), +}); + +const workspaceDatabaseCredentialStoreSchema = z.object({ + entries: z + .record(z.string(), workspaceDatabaseCredentialEntrySchema) + .default({}), +}); + +export type WorkspaceDatabaseDefinition = z.infer< + typeof workspaceDatabaseDefinitionSchema +>; +export type WorkspaceConfiguredDatabaseDiscoveryItem = + | { + source: "config"; + dialect: "sqlite"; + definitionId: string; + label: string; + group?: string; + absolutePath: string; + relativePath: string; + } + | { + source: "config"; + dialect: "postgres"; + definitionId: string; + label: string; + group?: string; + host: string; + port: number; + database: string; + ssl: boolean; + usernameHint?: string; + relativePath: string; + hasSavedCredentials: boolean; + }; + +function workspaceCredentialKey( + workspacePath: string, + definitionId: string, +): string { + return `${workspacePath}::${definitionId}`; +} + +function buildPostgresConnectionString(input: { + host: string; + port: number; + username: string; + password: string; + database: string; + ssl: boolean; +}): string { + const auth = + 
input.password.trim().length > 0 + ? `${encodeURIComponent(input.username)}:${encodeURIComponent(input.password)}` + : encodeURIComponent(input.username); + const query = input.ssl ? "?sslmode=require" : ""; + const trimmedHost = input.host.trim(); + const host = + trimmedHost.startsWith("[") && trimmedHost.endsWith("]") + ? trimmedHost + : trimmedHost.includes(":") + ? `[${trimmedHost}]` + : trimmedHost; + return `postgres://${auth}@${host}:${input.port}/${input.database}${query}`; +} + +function getPostgresDatabaseName( + definition: Extract, +): string { + return definition.database; +} + +async function loadWorkspaceDatabaseCredentialStore(): Promise< + z.infer +> { + try { + const decrypted = decrypt( + await readFile(WORKSPACE_DATABASE_CREDENTIALS_FILE), + ); + return workspaceDatabaseCredentialStoreSchema.parse(JSON.parse(decrypted)); + } catch (error) { + if ((error as NodeJS.ErrnoException | undefined)?.code === "ENOENT") { + return { entries: {} }; + } + throw error; + } +} + +async function saveWorkspaceDatabaseCredentialStore( + store: z.infer, +): Promise { + await mkdir(SUPERSET_HOME_DIR, { recursive: true, mode: 0o700 }); + await writeFile( + WORKSPACE_DATABASE_CREDENTIALS_FILE, + encrypt(JSON.stringify(store)), + { mode: SUPERSET_SENSITIVE_FILE_MODE }, + ); + await chmod( + WORKSPACE_DATABASE_CREDENTIALS_FILE, + SUPERSET_SENSITIVE_FILE_MODE, + ).catch(() => undefined); +} + +export async function loadWorkspaceDatabaseDefinitions( + workspacePath: string, +): Promise<{ + configPath: string; + definitions: WorkspaceDatabaseDefinition[]; +}> { + const configPath = path.join(workspacePath, WORKSPACE_DATABASES_CONFIG_FILE); + + try { + const raw = await readFile(configPath, "utf8"); + const parsed = workspaceDatabaseConfigSchema.parse(JSON.parse(raw)); + return { + configPath, + definitions: parsed.databases, + }; + } catch (error) { + if ((error as NodeJS.ErrnoException | undefined)?.code === "ENOENT") { + return { configPath, definitions: [] }; + } + + 
if (error instanceof z.ZodError) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Invalid workspace database config: ${error.issues[0]?.message ?? "Unknown schema error"}`, + }); + } + + if (error instanceof SyntaxError) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Invalid JSON in .superset/databases.json", + }); + } + + throw error; + } +} + +function toWorkspaceConfigSqlitePath( + workspacePath: string, + databasePath: string, +): string { + const absoluteDatabasePath = path.resolve(workspacePath, databasePath); + const relativePath = path.relative(workspacePath, absoluteDatabasePath); + + if ( + relativePath.length > 0 && + !relativePath.startsWith("..") && + !path.isAbsolute(relativePath) + ) { + return relativePath; + } + + return absoluteDatabasePath; +} + +async function writeWorkspaceDatabaseDefinitions(input: { + configPath: string; + config: Record; +}): Promise { + await mkdir(path.dirname(input.configPath), { recursive: true }); + await writeFile( + input.configPath, + `${JSON.stringify(input.config, null, 2)}\n`, + "utf8", + ); +} + +export async function discoverWorkspaceConfiguredDatabases( + workspacePath: string, +): Promise { + const { definitions } = await loadWorkspaceDatabaseDefinitions(workspacePath); + if (definitions.length === 0) { + return []; + } + + const credentialStore = await loadWorkspaceDatabaseCredentialStore(); + + return definitions.map((definition) => { + if (definition.dialect === "sqlite") { + return { + source: "config", + dialect: "sqlite", + definitionId: definition.id, + label: definition.label, + group: definition.group, + absolutePath: path.resolve(workspacePath, definition.path), + relativePath: path.join( + WORKSPACE_DATABASES_CONFIG_FILE, + `#${definition.id}`, + ), + }; + } + + const key = workspaceCredentialKey(workspacePath, definition.id); + return { + source: "config", + dialect: "postgres", + definitionId: definition.id, + label: definition.label, + group: definition.group, + host: 
definition.host, + port: definition.port ?? 5432, + database: getPostgresDatabaseName(definition), + ssl: definition.ssl ?? false, + usernameHint: + definition.username ?? credentialStore.entries[key]?.username, + relativePath: path.join( + WORKSPACE_DATABASES_CONFIG_FILE, + `#${definition.id}`, + ), + hasSavedCredentials: Boolean(credentialStore.entries[key]), + }; + }); +} + +export async function saveWorkspaceDatabaseCredentials(input: { + workspacePath: string; + definitionId: string; + username: string; + password: string; +}): Promise { + await withCredentialStoreLock(async () => { + const store = await loadWorkspaceDatabaseCredentialStore(); + store.entries[ + workspaceCredentialKey(input.workspacePath, input.definitionId) + ] = { + username: input.username.trim(), + password: input.password, + updatedAt: Date.now(), + }; + await saveWorkspaceDatabaseCredentialStore(store); + }); +} + +export async function updateWorkspaceDatabaseDefinition(input: { + workspacePath: string; + definitionId: string; + definition: + | { + dialect: "sqlite"; + label: string; + group?: string; + databasePath: string; + } + | { + dialect: "postgres"; + label: string; + group?: string; + host: string; + port: number; + database?: string; + ssl: boolean; + username?: string; + }; +}): Promise { + const { configPath, definitions } = await loadWorkspaceDatabaseDefinitions( + input.workspacePath, + ); + const definitionIndex = definitions.findIndex( + (candidate) => candidate.id === input.definitionId, + ); + + if (definitionIndex === -1) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Workspace database definition not found.", + }); + } + + const currentDefinition = definitions[definitionIndex]; + const nextDefinition = workspaceDatabaseDefinitionSchema.parse( + input.definition.dialect === "sqlite" + ? 
{ + id: input.definitionId, + dialect: "sqlite", + label: input.definition.label, + group: input.definition.group, + path: toWorkspaceConfigSqlitePath( + input.workspacePath, + input.definition.databasePath, + ), + } + : { + id: input.definitionId, + dialect: "postgres", + label: input.definition.label, + group: input.definition.group, + host: input.definition.host, + port: input.definition.port, + database: input.definition.database, + ssl: input.definition.ssl, + username: input.definition.username, + }, + ); + + const rawConfig = JSON.parse(await readFile(configPath, "utf8")) as { + databases?: unknown[]; + [key: string]: unknown; + }; + const rawDefinitions = Array.isArray(rawConfig.databases) + ? [...rawConfig.databases] + : []; + const currentRawDefinition = + typeof rawDefinitions[definitionIndex] === "object" && + rawDefinitions[definitionIndex] !== null + ? (rawDefinitions[definitionIndex] as Record) + : {}; + + const nextRawDefinition: Record = + nextDefinition.dialect === "sqlite" + ? 
{ + ...currentRawDefinition, + id: nextDefinition.id, + label: nextDefinition.label, + dialect: "sqlite", + path: nextDefinition.path, + } + : { + ...currentRawDefinition, + id: nextDefinition.id, + label: nextDefinition.label, + dialect: "postgres", + host: nextDefinition.host, + port: nextDefinition.port, + database: nextDefinition.database, + ssl: nextDefinition.ssl, + username: nextDefinition.username, + }; + + if (nextDefinition.group) { + nextRawDefinition.group = nextDefinition.group; + } else { + delete nextRawDefinition.group; + } + + if (nextDefinition.dialect === "postgres") { + delete nextRawDefinition.path; + if (!nextDefinition.username) { + delete nextRawDefinition.username; + } + } else { + delete nextRawDefinition.host; + delete nextRawDefinition.port; + delete nextRawDefinition.database; + delete nextRawDefinition.ssl; + delete nextRawDefinition.username; + } + + rawDefinitions[definitionIndex] = nextRawDefinition; + await writeWorkspaceDatabaseDefinitions({ + configPath, + config: { + ...rawConfig, + databases: rawDefinitions, + }, + }); + + if ( + currentDefinition.dialect === "postgres" && + nextDefinition.dialect === "postgres" && + nextDefinition.username + ) { + await withCredentialStoreLock(async () => { + const store = await loadWorkspaceDatabaseCredentialStore(); + const credentialKey = workspaceCredentialKey( + input.workspacePath, + input.definitionId, + ); + const existingCredentials = store.entries[credentialKey]; + if (existingCredentials) { + store.entries[credentialKey] = { + ...existingCredentials, + username: nextDefinition.username ?? 
existingCredentials.username, + updatedAt: Date.now(), + }; + await saveWorkspaceDatabaseCredentialStore(store); + } + }); + } + + return nextDefinition; +} + +const manualPostgresConnectionStoreSchema = z.object({ + entries: z + .record( + z.string(), + z.object({ + connectionString: z.string().min(1), + updatedAt: z.number().int().nonnegative(), + }), + ) + .default({}), +}); + +const MANUAL_POSTGRES_CONNECTIONS_FILE = path.join( + SUPERSET_HOME_DIR, + "manual-postgres-connections.enc", +); + +async function loadManualPostgresConnectionStore(): Promise< + z.infer +> { + try { + const decrypted = decrypt(await readFile(MANUAL_POSTGRES_CONNECTIONS_FILE)); + return manualPostgresConnectionStoreSchema.parse(JSON.parse(decrypted)); + } catch (error) { + if ((error as NodeJS.ErrnoException | undefined)?.code === "ENOENT") { + return { entries: {} }; + } + throw error; + } +} + +async function saveManualPostgresConnectionStore( + store: z.infer, +): Promise { + await mkdir(SUPERSET_HOME_DIR, { recursive: true, mode: 0o700 }); + await writeFile( + MANUAL_POSTGRES_CONNECTIONS_FILE, + encrypt(JSON.stringify(store)), + { mode: SUPERSET_SENSITIVE_FILE_MODE }, + ); + await chmod( + MANUAL_POSTGRES_CONNECTIONS_FILE, + SUPERSET_SENSITIVE_FILE_MODE, + ).catch(() => undefined); +} + +export async function saveManualPostgresConnectionString( + connectionId: string, + connectionString: string, +): Promise { + await withManualConnectionStoreLock(async () => { + const store = await loadManualPostgresConnectionStore(); + store.entries[connectionId] = { + connectionString, + updatedAt: Date.now(), + }; + await saveManualPostgresConnectionStore(store); + }); +} + +export async function getManualPostgresConnectionString( + connectionId: string, +): Promise { + const store = await loadManualPostgresConnectionStore(); + return store.entries[connectionId]?.connectionString ?? 
null; +} + +export async function deleteManualPostgresConnectionString( + connectionId: string, +): Promise { + await withManualConnectionStoreLock(async () => { + const store = await loadManualPostgresConnectionStore(); + delete store.entries[connectionId]; + await saveManualPostgresConnectionStore(store); + }); +} + +export async function resolvePostgresConnectionStringFromSource(input: { + source: z.infer; +}): Promise { + const source = input.source; + if (source.kind === "connectionString") { + const connectionString = await getManualPostgresConnectionString( + source.connectionStringId, + ); + if (!connectionString) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Manual Postgres connection string not found.", + }); + } + return connectionString; + } + + const { definitions } = await loadWorkspaceDatabaseDefinitions( + source.workspacePath, + ); + const definition = definitions.find( + (candidate) => candidate.id === source.definitionId, + ); + + if (!definition || definition.dialect !== "postgres") { + throw new TRPCError({ + code: "NOT_FOUND", + message: "Workspace database definition not found.", + }); + } + + const credentialStore = await loadWorkspaceDatabaseCredentialStore(); + const credentials = + credentialStore.entries[ + workspaceCredentialKey(source.workspacePath, source.definitionId) + ]; + + if (!credentials) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "Credentials for this workspace database have not been saved yet.", + }); + } + + return buildPostgresConnectionString({ + host: definition.host, + port: definition.port ?? 5432, + username: credentials.username, + password: credentials.password, + database: getPostgresDatabaseName(definition), + ssl: definition.ssl ?? 
false, + }); +} diff --git a/apps/desktop/src/lib/trpc/routers/diagnostics/index.ts b/apps/desktop/src/lib/trpc/routers/diagnostics/index.ts new file mode 100644 index 00000000000..e68297cdcf1 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/diagnostics/index.ts @@ -0,0 +1,562 @@ +import path from "node:path"; +import { TRPCError } from "@trpc/server"; +import * as ts from "typescript"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { getWorkspace } from "../workspaces/utils/db-helpers"; +import { getWorkspacePath } from "../workspaces/utils/worktree"; + +const MAX_PROBLEMS = 500; + +const openDocumentSchema = z.object({ + relativePath: z.string(), + content: z.string().nullable(), +}); + +const typeScriptProblemSchema = z.object({ + relativePath: z.string().nullable(), + line: z.number().nullable(), + column: z.number().nullable(), + endLine: z.number().nullable(), + endColumn: z.number().nullable(), + message: z.string(), + code: z.union([z.string(), z.number()]).nullable(), + severity: z.enum(["error", "warning", "info", "hint"]), + source: z.string(), +}); + +function resolveConfigPath(workspacePath: string): string | null { + const tsconfigPath = path.join(workspacePath, "tsconfig.json"); + if (ts.sys.fileExists(tsconfigPath)) { + return tsconfigPath; + } + + const jsconfigPath = path.join(workspacePath, "jsconfig.json"); + if (ts.sys.fileExists(jsconfigPath)) { + return jsconfigPath; + } + + return null; +} + +function findNearestConfigPath( + workspacePath: string, + relativePath: string, +): string | null { + let currentDirectory = path.resolve( + workspacePath, + path.dirname(relativePath), + ); + const normalizedWorkspacePath = path.resolve(workspacePath); + + while (true) { + const tsconfigPath = path.join(currentDirectory, "tsconfig.json"); + if (ts.sys.fileExists(tsconfigPath)) { + return tsconfigPath; + } + + const jsconfigPath = path.join(currentDirectory, "jsconfig.json"); + if 
(ts.sys.fileExists(jsconfigPath)) { + return jsconfigPath; + } + + if (currentDirectory === normalizedWorkspacePath) { + return null; + } + + const parentDirectory = path.dirname(currentDirectory); + if (parentDirectory === currentDirectory) { + return null; + } + + currentDirectory = parentDirectory; + } +} + +function mapSeverity( + category: ts.DiagnosticCategory, +): "error" | "warning" | "info" | "hint" { + switch (category) { + case ts.DiagnosticCategory.Error: + return "error"; + case ts.DiagnosticCategory.Warning: + return "warning"; + case ts.DiagnosticCategory.Suggestion: + return "hint"; + default: + return "info"; + } +} + +function normalizeRelativePath( + workspacePath: string, + fileName: string, +): string | null { + const relativePath = path.relative(workspacePath, fileName); + if ( + !relativePath || + relativePath.startsWith("..") || + path.isAbsolute(relativePath) + ) { + return null; + } + + return relativePath.split(path.sep).join("/"); +} + +function diagnosticSortValue(severity: string): number { + switch (severity) { + case "error": + return 0; + case "warning": + return 1; + case "info": + return 2; + default: + return 3; + } +} + +function createOpenDocumentMap( + workspacePath: string, + openDocuments: Array<{ relativePath: string; content: string | null }>, +): Map { + return openDocuments.reduce((map, document) => { + if (document.content === null) { + return map; + } + + map.set( + path.resolve(workspacePath, document.relativePath), + document.content, + ); + return map; + }, new Map()); +} + +function createCompilerHostWithOpenDocuments( + options: ts.CompilerOptions, + openDocumentMap: Map, +): ts.CompilerHost { + const compilerHost = ts.createCompilerHost(options, true); + const originalReadFile = compilerHost.readFile.bind(compilerHost); + const originalFileExists = compilerHost.fileExists.bind(compilerHost); + const originalGetSourceFile = compilerHost.getSourceFile.bind(compilerHost); + + compilerHost.readFile = (fileName) => { 
+ const override = openDocumentMap.get(path.resolve(fileName)); + if (override !== undefined) { + return override; + } + + return originalReadFile(fileName); + }; + + compilerHost.fileExists = (fileName) => { + if (openDocumentMap.has(path.resolve(fileName))) { + return true; + } + + return originalFileExists(fileName); + }; + + compilerHost.getSourceFile = ( + fileName, + languageVersionOrOptions, + onError, + shouldCreateNewSourceFile, + ) => { + const override = openDocumentMap.get(path.resolve(fileName)); + if (override !== undefined) { + return ts.createSourceFile( + fileName, + override, + languageVersionOrOptions, + true, + ); + } + + return originalGetSourceFile( + fileName, + languageVersionOrOptions, + onError, + shouldCreateNewSourceFile, + ); + }; + + return compilerHost; +} + +function getStandaloneCompilerOptions(filePath: string): ts.CompilerOptions { + const extension = path.extname(filePath).toLowerCase(); + return { + noEmit: true, + allowJs: [".js", ".jsx", ".mjs", ".cjs"].includes(extension), + checkJs: [".js", ".jsx", ".mjs", ".cjs"].includes(extension), + jsx: [".jsx", ".tsx"].includes(extension) ? ts.JsxEmit.Preserve : undefined, + target: ts.ScriptTarget.ESNext, + module: ts.ModuleKind.ESNext, + skipLibCheck: true, + moduleResolution: ts.ModuleResolutionKind.Bundler, + }; +} + +function createProblemKey(problem: { + relativePath: string | null; + line: number | null; + column: number | null; + message: string; + code: string | number | null; + severity: string; + source: string; +}): string { + return [ + problem.relativePath ?? "workspace", + problem.line ?? 0, + problem.column ?? 0, + problem.code ?? 
"no-code", + problem.severity, + problem.source, + problem.message, + ].join("::"); +} + +function mapDiagnosticsToProblems( + diagnostics: readonly ts.Diagnostic[], + workspacePath: string, +) { + return diagnostics + .map((diagnostic) => { + const message = ts.flattenDiagnosticMessageText( + diagnostic.messageText, + "\n", + ); + const severity = mapSeverity(diagnostic.category); + const relativePath = diagnostic.file?.fileName + ? normalizeRelativePath(workspacePath, diagnostic.file.fileName) + : null; + + if (diagnostic.file?.fileName && relativePath === null) { + return null; + } + + const start = + diagnostic.file && typeof diagnostic.start === "number" + ? diagnostic.file.getLineAndCharacterOfPosition(diagnostic.start) + : null; + const end = + diagnostic.file && + typeof diagnostic.start === "number" && + typeof diagnostic.length === "number" + ? diagnostic.file.getLineAndCharacterOfPosition( + diagnostic.start + diagnostic.length, + ) + : null; + + return { + relativePath, + line: start ? start.line + 1 : null, + column: start ? start.character + 1 : null, + endLine: end ? end.line + 1 : null, + endColumn: end ? end.character + 1 : null, + message, + code: diagnostic.code ?? 
null, + severity, + source: "typescript", + }; + }) + .filter( + (problem): problem is NonNullable => problem !== null, + ); +} + +function filterProblemsForOpenDocuments( + problems: Array>, + openDocuments: Array<{ relativePath: string; content: string | null }>, +) { + if (openDocuments.length === 0) { + return problems; + } + + const openDocumentPaths = new Set( + openDocuments.map((document) => document.relativePath), + ); + + return problems.filter((problem) => { + if (problem.relativePath === null) { + return false; + } + + return openDocumentPaths.has(problem.relativePath); + }); +} + +export const createDiagnosticsRouter = () => { + return router({ + getTypeScriptProblems: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + openDocuments: z.array(openDocumentSchema).default([]), + }), + ) + .output( + z.object({ + status: z.enum(["ready", "no-config"]), + workspacePath: z.string(), + configPath: z.string().nullable(), + problems: z.array(typeScriptProblemSchema), + totalCount: z.number(), + truncated: z.boolean(), + summary: z.object({ + errorCount: z.number(), + warningCount: z.number(), + infoCount: z.number(), + hintCount: z.number(), + }), + }), + ) + .query(({ input }) => { + const workspace = getWorkspace(input.workspaceId); + if (!workspace) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace ${input.workspaceId} not found`, + }); + } + + const workspacePath = getWorkspacePath(workspace); + if (!workspacePath) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: `Workspace ${input.workspaceId} has no filesystem path`, + }); + } + + const rootConfigPath = resolveConfigPath(workspacePath); + const configPaths = new Set(); + const standaloneFiles: string[] = []; + const openDocumentMap = createOpenDocumentMap( + workspacePath, + input.openDocuments, + ); + + if (input.openDocuments.length > 0) { + for (const document of input.openDocuments) { + const configPath = findNearestConfigPath( + workspacePath, + 
document.relativePath, + ); + if (configPath) { + configPaths.add(configPath); + } else { + standaloneFiles.push( + path.resolve(workspacePath, document.relativePath), + ); + } + } + } else if (rootConfigPath) { + configPaths.add(rootConfigPath); + } + + if (configPaths.size === 0 && standaloneFiles.length === 0) { + console.log("[diagnostics] no config found", { + workspaceId: input.workspaceId, + workspacePath, + openDocuments: input.openDocuments.map( + (document) => document.relativePath, + ), + }); + return { + status: "no-config" as const, + workspacePath, + configPath: null, + problems: [], + totalCount: 0, + truncated: false, + summary: { + errorCount: 0, + warningCount: 0, + infoCount: 0, + hintCount: 0, + }, + }; + } + + const collectedProblems = new Map< + string, + z.infer + >(); + const configPathList = Array.from(configPaths); + + console.log("[diagnostics] target documents", { + workspaceId: input.workspaceId, + workspacePath, + openDocuments: input.openDocuments.map((document) => ({ + relativePath: document.relativePath, + hasOverride: document.content !== null, + })), + configPaths: configPathList, + standaloneFiles: standaloneFiles.map((filePath) => + normalizeRelativePath(workspacePath, filePath), + ), + }); + + for (const configPath of configPathList) { + const configFile = ts.readConfigFile(configPath, ts.sys.readFile); + if (configFile.error) { + const problem = { + relativePath: normalizeRelativePath(workspacePath, configPath), + line: null, + column: null, + endLine: null, + endColumn: null, + message: ts.flattenDiagnosticMessageText( + configFile.error.messageText, + "\n", + ), + code: configFile.error.code, + severity: mapSeverity(configFile.error.category), + source: "typescript", + }; + collectedProblems.set(createProblemKey(problem), problem); + continue; + } + + const parsedConfig = ts.parseJsonConfigFileContent( + configFile.config, + ts.sys, + path.dirname(configPath), + { noEmit: true }, + configPath, + ); + const configOpenFiles = 
input.openDocuments + .filter( + (document) => + findNearestConfigPath(workspacePath, document.relativePath) === + configPath, + ) + .map((document) => + path.resolve(workspacePath, document.relativePath), + ); + const rootNames = Array.from( + new Set([...parsedConfig.fileNames, ...configOpenFiles]), + ); + console.log("[diagnostics] parsed config", { + workspaceId: input.workspaceId, + workspacePath, + configPath, + openDocumentCount: input.openDocuments.length, + rootFileCount: rootNames.length, + sampleRootFiles: rootNames + .slice(0, 20) + .map((fileName) => + normalizeRelativePath(workspacePath, fileName), + ), + }); + + const compilerHost = createCompilerHostWithOpenDocuments( + parsedConfig.options, + openDocumentMap, + ); + const program = ts.createProgram({ + rootNames, + options: parsedConfig.options, + projectReferences: parsedConfig.projectReferences, + host: compilerHost, + }); + const diagnostics = [ + ...parsedConfig.errors, + ...ts.getPreEmitDiagnostics(program), + ]; + for (const problem of mapDiagnosticsToProblems( + diagnostics, + workspacePath, + )) { + collectedProblems.set(createProblemKey(problem), problem); + } + } + + for (const standaloneFilePath of standaloneFiles) { + const compilerOptions = + getStandaloneCompilerOptions(standaloneFilePath); + const compilerHost = createCompilerHostWithOpenDocuments( + compilerOptions, + openDocumentMap, + ); + const program = ts.createProgram({ + rootNames: [standaloneFilePath], + options: compilerOptions, + host: compilerHost, + }); + for (const problem of mapDiagnosticsToProblems( + ts.getPreEmitDiagnostics(program), + workspacePath, + )) { + collectedProblems.set(createProblemKey(problem), problem); + } + } + + const mappedProblems = filterProblemsForOpenDocuments( + Array.from(collectedProblems.values()), + input.openDocuments, + ).sort((left, right) => { + const severityDiff = + diagnosticSortValue(left.severity) - + diagnosticSortValue(right.severity); + if (severityDiff !== 0) { + return 
severityDiff; + } + + const pathDiff = (left.relativePath ?? "").localeCompare( + right.relativePath ?? "", + ); + if (pathDiff !== 0) { + return pathDiff; + } + return (left.line ?? 0) - (right.line ?? 0); + }); + + const summary = mappedProblems.reduce( + (acc, problem) => { + if (problem.severity === "error") acc.errorCount += 1; + if (problem.severity === "warning") acc.warningCount += 1; + if (problem.severity === "info") acc.infoCount += 1; + if (problem.severity === "hint") acc.hintCount += 1; + return acc; + }, + { + errorCount: 0, + warningCount: 0, + infoCount: 0, + hintCount: 0, + }, + ); + + console.log("[diagnostics] result", { + workspaceId: input.workspaceId, + configPaths: configPathList, + totalCount: mappedProblems.length, + problemFiles: Array.from( + new Set( + mappedProblems.map( + (problem) => problem.relativePath ?? "Workspace", + ), + ), + ), + }); + + return { + status: "ready" as const, + workspacePath, + configPath: configPathList.length === 1 ? configPathList[0] : null, + problems: mappedProblems.slice(0, MAX_PROBLEMS), + totalCount: mappedProblems.length, + truncated: mappedProblems.length > MAX_PROBLEMS, + summary, + }; + }), + }); +}; + +export type DiagnosticsRouter = ReturnType; diff --git a/apps/desktop/src/lib/trpc/routers/docker/index.ts b/apps/desktop/src/lib/trpc/routers/docker/index.ts new file mode 100644 index 00000000000..143e623c90c --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/docker/index.ts @@ -0,0 +1,677 @@ +import { + type ExecFileOptionsWithStringEncoding, + execFile, +} from "node:child_process"; +import type { Dirent } from "node:fs"; +import { readdir } from "node:fs/promises"; +import path from "node:path"; +import { promisify } from "node:util"; +import { workspaces } from "@superset/local-db"; +import { TRPCError } from "@trpc/server"; +import { eq } from "drizzle-orm"; +import { localDb } from "main/lib/local-db"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { 
getProcessEnvWithShellPath } from "../workspaces/utils/shell-env"; +import { getWorkspacePath } from "../workspaces/utils/worktree"; + +const execFileAsync = promisify(execFile); + +const COMPOSE_FILE_NAMES = new Set([ + "docker-compose.yml", + "docker-compose.yaml", + "compose.yml", + "compose.yaml", +]); + +const DOCKERFILE_EXACT_NAMES = new Set(["Dockerfile", "Containerfile"]); + +function isDockerfileName(name: string): boolean { + if (DOCKERFILE_EXACT_NAMES.has(name)) { + return true; + } + // Dockerfile.dev, Dockerfile.prod, etc. + if (name.startsWith("Dockerfile.") || name.startsWith("Containerfile.")) { + return true; + } + // foo.dockerfile + if (name.endsWith(".dockerfile")) { + return true; + } + return false; +} + +const IGNORED_DIRECTORIES = new Set([ + ".git", + ".next", + ".superset", + ".turbo", + "build", + "coverage", + "dist", + "node_modules", + "out", + "target", +]); + +const SAFE_CONTAINER_ID = z + .string() + .min(1) + .max(256) + .regex(/^[A-Za-z0-9_.-]+$/u, "Invalid container identifier"); + +const composeActionInput = z.object({ + workspaceId: z.string(), + composeFilePath: z.string().min(1), +}); + +const containerActionInput = z.object({ + workspaceId: z.string(), + containerId: SAFE_CONTAINER_ID, +}); + +interface ComposeFileSummary { + absolutePath: string; + directoryPath: string; + projectName: string; + relativePath: string; +} + +interface DockerfileSummary { + absolutePath: string; + directoryPath: string; + name: string; + relativePath: string; +} + +interface DockerPsContainerRow { + Command?: string; + ID?: string; + Image?: string; + Labels?: string; + Names?: string; + Ports?: string; + State?: string; + Status?: string; +} + +interface DockerContainerSummary { + command: string; + composeFilePaths: string[]; + id: string; + image: string; + name: string; + ports: string; + service: string | null; + state: string; + status: string; +} + +function normalizeExecError(error: unknown): never { + if ( + typeof error === "object" 
&& + error !== null && + "code" in error && + error.code === "ENOENT" + ) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "Docker CLI が見つかりません。Docker Desktop または docker CLI をインストールしてください。", + }); + } + + const stderr = + typeof error === "object" && + error !== null && + "stderr" in error && + typeof error.stderr === "string" + ? error.stderr.trim() + : ""; + + throw new TRPCError({ + code: "BAD_REQUEST", + message: + stderr.length > 0 + ? stderr + : error instanceof Error + ? error.message + : "Docker command failed", + }); +} + +async function execDocker( + args: string[], + options?: Omit, +): Promise { + const env = await getProcessEnvWithShellPath( + options?.env ? { ...process.env, ...options.env } : process.env, + ); + + const { stdout } = await execFileAsync("docker", args, { + ...options, + encoding: "utf8", + env, + maxBuffer: 8 * 1024 * 1024, + }); + + return stdout; +} + +function parseLabelString(labelString: string): Record { + if (!labelString.trim()) { + return {}; + } + + const labels: Record = {}; + for (const part of labelString.split(",")) { + const separatorIndex = part.indexOf("="); + if (separatorIndex <= 0) { + continue; + } + + const key = part.slice(0, separatorIndex).trim(); + const value = part.slice(separatorIndex + 1).trim(); + if (key.length > 0) { + labels[key] = value; + } + } + + return labels; +} + +function parseDockerPsJsonLines(stdout: string): DockerPsContainerRow[] { + return stdout + .split("\n") + .map((line) => line.trim()) + .filter((line) => line.length > 0) + .map((line) => JSON.parse(line) as DockerPsContainerRow); +} + +function mapContainerSummary( + row: DockerPsContainerRow, +): DockerContainerSummary { + const labels = parseLabelString(row.Labels ?? ""); + const composeFilePaths = ( + labels["com.docker.compose.project.config_files"] ?? "" + ) + .split(",") + .map((entry) => entry.trim()) + .filter((entry) => entry.length > 0); + + return { + command: row.Command ?? 
"", + composeFilePaths, + id: row.ID ?? "", + image: row.Image ?? "", + name: row.Names ?? "", + ports: row.Ports ?? "", + service: labels["com.docker.compose.service"] ?? null, + state: row.State ?? "unknown", + status: row.Status ?? "", + }; +} + +function isIgnoredDirectory(name: string): boolean { + return IGNORED_DIRECTORIES.has(name); +} + +async function findComposeFiles( + rootPath: string, +): Promise { + const queue: string[] = [rootPath]; + const composeFiles: ComposeFileSummary[] = []; + + while (queue.length > 0) { + const currentDir = queue.shift(); + if (!currentDir) { + continue; + } + + let entries: Dirent[]; + try { + entries = await readdir(currentDir, { withFileTypes: true }); + } catch { + continue; + } + + for (const entry of entries) { + if (entry.isSymbolicLink()) { + continue; + } + + const absolutePath = path.join(currentDir, entry.name); + if (entry.isDirectory()) { + if (isIgnoredDirectory(entry.name)) { + continue; + } + queue.push(absolutePath); + continue; + } + + if (!entry.isFile() || !COMPOSE_FILE_NAMES.has(entry.name)) { + continue; + } + + const directoryPath = path.dirname(absolutePath); + const relativePath = path.relative(rootPath, absolutePath) || entry.name; + + composeFiles.push({ + absolutePath, + directoryPath, + projectName: path.basename(directoryPath), + relativePath, + }); + } + } + + return composeFiles.sort((left, right) => + left.relativePath.localeCompare(right.relativePath), + ); +} + +async function findDockerfiles(rootPath: string): Promise { + const queue: string[] = [rootPath]; + const dockerfiles: DockerfileSummary[] = []; + + while (queue.length > 0) { + const currentDir = queue.shift(); + if (!currentDir) { + continue; + } + + let entries: Dirent[]; + try { + entries = await readdir(currentDir, { withFileTypes: true }); + } catch { + continue; + } + + for (const entry of entries) { + if (entry.isSymbolicLink()) { + continue; + } + + const absolutePath = path.join(currentDir, entry.name); + if 
(entry.isDirectory()) { + if (isIgnoredDirectory(entry.name)) { + continue; + } + queue.push(absolutePath); + continue; + } + + if (!entry.isFile() || !isDockerfileName(entry.name)) { + continue; + } + + const directoryPath = path.dirname(absolutePath); + const relativePath = path.relative(rootPath, absolutePath) || entry.name; + + dockerfiles.push({ + absolutePath, + directoryPath, + name: entry.name, + relativePath, + }); + } + } + + return dockerfiles.sort((left, right) => + left.relativePath.localeCompare(right.relativePath), + ); +} + +function getWorkspaceRootPath(workspaceId: string): string { + const workspace = localDb + .select() + .from(workspaces) + .where(eq(workspaces.id, workspaceId)) + .get(); + + if (!workspace) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace ${workspaceId} not found`, + }); + } + + const workspaceRoot = getWorkspacePath(workspace); + if (!workspaceRoot) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Workspace path is unavailable", + }); + } + + return workspaceRoot; +} + +async function resolveComposeFileForWorkspace( + workspaceId: string, + composeFilePath: string, +): Promise { + const workspaceRoot = getWorkspaceRootPath(workspaceId); + const composeFiles = await findComposeFiles(workspaceRoot); + const composeFile = composeFiles.find( + (entry) => entry.absolutePath === composeFilePath, + ); + + if (!composeFile) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Selected compose file does not belong to this workspace", + }); + } + + return composeFile; +} + +async function assertContainerBelongsToWorkspace( + workspaceId: string, + containerId: string, +): Promise { + const workspaceRoot = getWorkspaceRootPath(workspaceId); + const composeFiles = await findComposeFiles(workspaceRoot); + const composeFilePaths = new Set( + composeFiles.map((composeFile) => composeFile.absolutePath), + ); + + const stdout = await execDocker(["ps", "-a", "--format", "json"], { + cwd: 
workspaceRoot, + }); + const container = parseDockerPsJsonLines(stdout) + .map(mapContainerSummary) + .find((entry) => entry.id === containerId); + + if ( + !container || + !container.composeFilePaths.some((composeFilePath) => + composeFilePaths.has(composeFilePath), + ) + ) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Selected container does not belong to this workspace", + }); + } +} + +export const createDockerRouter = () => { + return router({ + getComposeFiles: publicProcedure + .input(z.object({ workspaceId: z.string() })) + .query(async ({ input }) => { + const workspaceRoot = getWorkspaceRootPath(input.workspaceId); + const [composeFiles, dockerfiles] = await Promise.all([ + findComposeFiles(workspaceRoot), + findDockerfiles(workspaceRoot), + ]); + return { + workspaceRoot, + composeFiles, + dockerfiles, + }; + }), + + list: publicProcedure + .input(z.object({ workspaceId: z.string() })) + .query(async ({ input }) => { + const workspaceRoot = getWorkspaceRootPath(input.workspaceId); + const [composeFiles, dockerfiles] = await Promise.all([ + findComposeFiles(workspaceRoot), + findDockerfiles(workspaceRoot), + ]); + + if (composeFiles.length === 0 && dockerfiles.length === 0) { + return { + composeFiles: [], + dockerfiles: [], + dockerAvailable: true, + dockerError: null, + workspaceRoot, + }; + } + + let containers: DockerContainerSummary[] = []; + let dockerAvailable = true; + let dockerError: string | null = null; + + try { + const stdout = await execDocker(["ps", "-a", "--format", "json"]); + containers = parseDockerPsJsonLines(stdout).map(mapContainerSummary); + } catch (error) { + dockerAvailable = false; + dockerError = + typeof error === "object" && + error !== null && + "stderr" in error && + typeof error.stderr === "string" && + error.stderr.trim().length > 0 + ? error.stderr.trim() + : error instanceof Error + ? 
error.message + : "Failed to read Docker containers"; + } + + return { + composeFiles: composeFiles.map((composeFile) => { + const matchingContainers = containers + .filter((container) => + container.composeFilePaths.includes(composeFile.absolutePath), + ) + .sort((left, right) => { + const leftRunning = left.state === "running" ? 0 : 1; + const rightRunning = right.state === "running" ? 0 : 1; + if (leftRunning !== rightRunning) + return leftRunning - rightRunning; + const leftKey = `${left.service ?? ""}:${left.name}`; + const rightKey = `${right.service ?? ""}:${right.name}`; + return leftKey.localeCompare(rightKey); + }); + + return { + ...composeFile, + containers: matchingContainers, + runningContainers: matchingContainers.filter( + (container) => container.state === "running", + ).length, + totalContainers: matchingContainers.length, + }; + }), + dockerfiles, + dockerAvailable, + dockerError, + workspaceRoot, + }; + }), + + startProject: publicProcedure + .input( + composeActionInput.extend({ + rebuild: z.boolean().optional(), + }), + ) + .mutation(async ({ input }) => { + const composeFile = await resolveComposeFileForWorkspace( + input.workspaceId, + input.composeFilePath, + ); + + try { + const args = ["compose", "-f", composeFile.absolutePath, "up", "-d"]; + if (input.rebuild) { + args.push("--build", "--force-recreate"); + } + await execDocker(args, { cwd: composeFile.directoryPath }); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + stopProject: publicProcedure + .input(composeActionInput) + .mutation(async ({ input }) => { + const composeFile = await resolveComposeFileForWorkspace( + input.workspaceId, + input.composeFilePath, + ); + + try { + await execDocker( + ["compose", "-f", composeFile.absolutePath, "stop"], + { + cwd: composeFile.directoryPath, + }, + ); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + removeProject: publicProcedure + .input(composeActionInput) 
+ .mutation(async ({ input }) => { + const composeFile = await resolveComposeFileForWorkspace( + input.workspaceId, + input.composeFilePath, + ); + + try { + await execDocker( + ["compose", "-f", composeFile.absolutePath, "down"], + { + cwd: composeFile.directoryPath, + }, + ); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + startContainer: publicProcedure + .input(containerActionInput) + .mutation(async ({ input }) => { + try { + await assertContainerBelongsToWorkspace( + input.workspaceId, + input.containerId, + ); + await execDocker(["container", "start", input.containerId], { + cwd: getWorkspaceRootPath(input.workspaceId), + }); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + stopContainer: publicProcedure + .input(containerActionInput) + .mutation(async ({ input }) => { + try { + await assertContainerBelongsToWorkspace( + input.workspaceId, + input.containerId, + ); + await execDocker(["container", "stop", input.containerId], { + cwd: getWorkspaceRootPath(input.workspaceId), + }); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + restartContainer: publicProcedure + .input(containerActionInput) + .mutation(async ({ input }) => { + try { + await assertContainerBelongsToWorkspace( + input.workspaceId, + input.containerId, + ); + await execDocker(["container", "restart", input.containerId], { + cwd: getWorkspaceRootPath(input.workspaceId), + }); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + inspectContainer: publicProcedure + .input(containerActionInput) + .query(async ({ input }) => { + try { + await assertContainerBelongsToWorkspace( + input.workspaceId, + input.containerId, + ); + const stdout = await execDocker( + ["container", "inspect", "--format", "json", input.containerId], + { cwd: getWorkspaceRootPath(input.workspaceId) }, + ); + return JSON.parse(stdout) as unknown; + } catch (error) 
{ + normalizeExecError(error); + } + }), + + buildDockerfile: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + dockerfilePath: z.string().min(1), + tag: z.string().min(1), + }), + ) + .mutation(async ({ input }) => { + const workspaceRoot = getWorkspaceRootPath(input.workspaceId); + const dockerfiles = await findDockerfiles(workspaceRoot); + const dockerfile = dockerfiles.find( + (entry) => entry.absolutePath === input.dockerfilePath, + ); + + if (!dockerfile) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Selected Dockerfile does not belong to this workspace", + }); + } + + try { + await execDocker( + ["build", "-f", dockerfile.absolutePath, "-t", input.tag, "."], + { cwd: dockerfile.directoryPath }, + ); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + + removeDockerImage: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + imageTag: z.string().min(1), + }), + ) + .mutation(async ({ input }) => { + getWorkspaceRootPath(input.workspaceId); + + try { + await execDocker(["rmi", input.imageTag]); + return { success: true }; + } catch (error) { + normalizeExecError(error); + } + }), + }); +}; + +export type DockerRouter = ReturnType; diff --git a/apps/desktop/src/lib/trpc/routers/extensions/index.ts b/apps/desktop/src/lib/trpc/routers/extensions/index.ts new file mode 100644 index 00000000000..8eb190e6a38 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/extensions/index.ts @@ -0,0 +1,73 @@ +import type { BrowserWindow } from "electron"; +import { + getExtensionsWithToolbarInfo, + installExtension, + listExtensions, + toggleExtension, + uninstallExtension, +} from "main/lib/extensions/extension-manager"; +import { extensionPopupManager } from "main/lib/extensions/extension-popup-manager"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +export const createExtensionsRouter = ( + getWindow: () => BrowserWindow | null, +) => { + return router({ + 
list: publicProcedure.query(async () => { + return listExtensions(); + }), + + install: publicProcedure + .input(z.object({ input: z.string() })) + .mutation(async ({ input }) => { + return installExtension(input.input); + }), + + uninstall: publicProcedure + .input(z.object({ extensionId: z.string() })) + .mutation(async ({ input }) => { + await uninstallExtension(input.extensionId); + }), + + toggle: publicProcedure + .input(z.object({ extensionId: z.string(), enabled: z.boolean() })) + .mutation(async ({ input }) => { + return toggleExtension(input.extensionId, input.enabled); + }), + + listToolbarExtensions: publicProcedure.query(async () => { + return getExtensionsWithToolbarInfo(); + }), + + openPopup: publicProcedure + .input( + z.object({ + extensionId: z.string(), + popupPath: z.string(), + anchorRect: z.object({ + x: z.number(), + y: z.number(), + width: z.number(), + height: z.number(), + }), + }), + ) + .mutation(({ input }) => { + const window = getWindow(); + if (!window) return { success: false }; + extensionPopupManager.openPopup( + window, + input.extensionId, + input.popupPath, + input.anchorRect, + ); + return { success: true }; + }), + + closePopup: publicProcedure.mutation(() => { + extensionPopupManager.closePopup(); + return { success: true }; + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/external/index.ts b/apps/desktop/src/lib/trpc/routers/external/index.ts index 9de5daf3b47..226f0a4bc84 100644 --- a/apps/desktop/src/lib/trpc/routers/external/index.ts +++ b/apps/desktop/src/lib/trpc/routers/external/index.ts @@ -1,4 +1,5 @@ import fs from "node:fs"; +import { access, readFile, writeFile } from "node:fs/promises"; import nodePath from "node:path"; import { EXTERNAL_APPS, @@ -8,7 +9,13 @@ import { } from "@superset/local-db"; import { TRPCError } from "@trpc/server"; import { eq } from "drizzle-orm"; -import { clipboard, shell } from "electron"; +import { + BrowserWindow, + clipboard, + dialog, + type OpenDialogOptions, + 
shell, +} from "electron"; import { localDb } from "main/lib/local-db"; import { externalUrlLogLabel, isSafeExternalUrl } from "main/lib/safe-url"; import { z } from "zod"; @@ -41,9 +48,59 @@ async function withResolveGuard(fn: () => Promise | T): Promise { } const ExternalAppSchema = z.enum(EXTERNAL_APPS); +const FileFilterSchema = z.object({ + name: z.string(), + extensions: z.array(z.string()), +}); const nonEditorSet = new Set(NON_EDITOR_APPS); +function isMissingExternalAppError(error: unknown): boolean { + if (!(error instanceof Error)) return false; + return ( + error.message.includes("Unable to find application named") || + error.message.includes("Ensure the application is installed.") + ); +} + +function isMissingPathError(error: unknown): boolean { + return error instanceof Error && "code" in error && error.code === "ENOENT"; +} + +async function assertPathExists(filePath: string): Promise { + try { + await access(filePath); + } catch (error) { + // Missing paths are expected in stale UI selections and should not hit Sentry. + if (isMissingPathError(error)) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `The file ${filePath} does not exist.`, + }); + } + throw error; + } +} + +function _normalizeOpenInAppError(error: unknown): never { + if (error instanceof TRPCError) { + throw error; + } + if (isMissingExternalAppError(error)) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error + ? error.message + : "Requested application is not available", + }); + } + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: error instanceof Error ? error.message : "Unknown error", + }); +} + /** Sets the global default editor if one hasn't been set yet. Skips non-editor apps. 
*/ function ensureGlobalDefaultEditor(app: ExternalApp) { if (nonEditorSet.has(app)) return; @@ -103,7 +160,10 @@ async function openPathInApp( throw lastError; } - await shell.openPath(filePath); + const openError = await shell.openPath(filePath); + if (openError) { + throw new Error(openError); + } } /** @@ -146,6 +206,20 @@ export const createExternalRouter = () => { shell.showItemInFolder(input); }), + openInDefaultApp: publicProcedure + .input(z.string()) + .mutation(async ({ input }) => { + // Surface missing files as a typed user-facing error before invoking the shell. + await assertPathExists(input); + const openError = await shell.openPath(input); + if (openError) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: openError, + }); + } + }), + openInApp: publicProcedure .input( z.object({ @@ -195,6 +269,91 @@ export const createExternalRouter = () => { clipboard.writeText(input); }), + openTextFile: publicProcedure + .input( + z.object({ + title: z.string().optional(), + buttonLabel: z.string().optional(), + filters: z.array(FileFilterSchema).optional(), + }), + ) + .mutation(async ({ input }) => { + const window = BrowserWindow.getFocusedWindow(); + const options: OpenDialogOptions = { + title: input.title, + buttonLabel: input.buttonLabel, + filters: input.filters, + properties: ["openFile"], + }; + const result = window + ? 
await dialog.showOpenDialog(window, options) + : await dialog.showOpenDialog(options); + + if (result.canceled || result.filePaths.length === 0) { + return null; + } + + const filePath = result.filePaths[0]; + if (!filePath) { + return null; + } + + try { + const content = await readFile(filePath, "utf-8"); + return { + path: filePath, + content, + }; + } catch (error) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: `Failed to read file: ${filePath}`, + cause: error, + }); + } + }), + + saveTextFile: publicProcedure + .input( + z.object({ + title: z.string().optional(), + defaultPath: z.string().optional(), + buttonLabel: z.string().optional(), + filters: z.array(FileFilterSchema).optional(), + content: z.string(), + }), + ) + .mutation(async ({ input }) => { + const window = BrowserWindow.getFocusedWindow(); + const options = { + title: input.title, + defaultPath: input.defaultPath, + buttonLabel: input.buttonLabel, + filters: input.filters, + }; + const result = window + ? await dialog.showSaveDialog(window, options) + : await dialog.showSaveDialog(options); + + if (result.canceled || !result.filePath) { + return null; + } + + try { + await writeFile(result.filePath, input.content, "utf-8"); + } catch (error) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: `Failed to write file: ${result.filePath}`, + cause: error, + }); + } + + return { + path: result.filePath, + }; + }), + resolvePath: publicProcedure .input( z.object({ @@ -269,7 +428,15 @@ export const createExternalRouter = () => { // No preferred editor configured yet. // Fall back to OS default file handler so Cmd/Ctrl+click still works // even when Cursor (or any specific editor) isn't installed. - await shell.openPath(filePath); + // `shell.openPath` returns a non-empty string on failure instead of + // throwing — surface that so callers see a meaningful error. 
+ const openError = await shell.openPath(filePath); + if (openError) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: `Failed to open file: ${openError}`, + }); + } return; } diff --git a/apps/desktop/src/lib/trpc/routers/filesystem/index.ts b/apps/desktop/src/lib/trpc/routers/filesystem/index.ts index 933b4dfaeeb..698ff4b9cc2 100644 --- a/apps/desktop/src/lib/trpc/routers/filesystem/index.ts +++ b/apps/desktop/src/lib/trpc/routers/filesystem/index.ts @@ -1,4 +1,9 @@ -import { toErrorMessage } from "@superset/workspace-fs/host"; +import type { FsContentMatch } from "@superset/workspace-fs/host"; +import { + toErrorMessage, + WorkspaceFsPathError, +} from "@superset/workspace-fs/host"; +import { TRPCError } from "@trpc/server"; import { observable } from "@trpc/server/observable"; import { z } from "zod"; import { publicProcedure, router } from "../.."; @@ -12,6 +17,95 @@ function isClosedStreamError(error: unknown): boolean { ); } +function getErrorCode(error: unknown): string | null { + if (!(error instanceof Error) || !("code" in error)) { + return null; + } + + return typeof error.code === "string" ? error.code : null; +} + +function throwFilesystemError(error: unknown): never { + if (error instanceof TRPCError) { + throw error; + } + + // Most filesystem failures here are expected user/state conditions, so normalize + // them to typed tRPC errors and reserve INTERNAL_SERVER_ERROR for real surprises. 
+ if (error instanceof WorkspaceFsPathError) { + switch (error.code) { + case "OUTSIDE_ROOT": + case "INVALID_TARGET": + throw new TRPCError({ + code: "BAD_REQUEST", + message: error.message, + }); + case "SYMLINK_ESCAPE": + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: error.message, + }); + } + } + + const errorCode = getErrorCode(error); + if (errorCode === "ENOENT") { + throw new TRPCError({ + code: "NOT_FOUND", + message: toErrorMessage(error), + }); + } + if ( + errorCode === "EISDIR" || + errorCode === "ENOTDIR" || + errorCode === "ERR_FS_EISDIR" + ) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: toErrorMessage(error), + }); + } + if (errorCode === "EACCES" || errorCode === "EPERM") { + throw new TRPCError({ + code: "FORBIDDEN", + message: toErrorMessage(error), + }); + } + if (errorCode === "EEXIST") { + throw new TRPCError({ + code: "CONFLICT", + message: toErrorMessage(error), + }); + } + if ( + error instanceof Error && + error.message.includes("Path is outside workspace root") + ) { + // workspace-fs still emits a plain Error for some outside-root checks. + // Keep this router-side fallback until that package fully types the error. + throw new TRPCError({ + code: "BAD_REQUEST", + message: error.message, + }); + } + + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: toErrorMessage(error), + }); +} + +async function withFilesystemErrorBoundary( + operation: () => Promise, +): Promise { + try { + return await operation(); + } catch (error) { + // Keep every filesystem procedure aligned on the same error semantics. 
+ throwFilesystemError(error); + } +} + const writeFileContentSchema = z.union([ z.string(), z.object({ @@ -28,6 +122,34 @@ type WatchPathEventBatch = { }>; }; +const searchContentInputSchema = z.object({ + workspaceId: z.string(), + query: z.string(), + includeHidden: z.boolean().optional(), + includePattern: z.string().optional(), + excludePattern: z.string().optional(), + limit: z.number().optional(), + isRegex: z.boolean().optional(), + caseSensitive: z.boolean().optional(), + wholeWord: z.boolean().optional(), + multiline: z.boolean().optional(), + scopeId: z.string().optional(), +}); + +const replaceContentInputSchema = z.object({ + workspaceId: z.string(), + query: z.string(), + replacement: z.string(), + includeHidden: z.boolean().optional(), + includePattern: z.string().optional(), + excludePattern: z.string().optional(), + isRegex: z.boolean().optional(), + caseSensitive: z.boolean().optional(), + wholeWord: z.boolean().optional(), + multiline: z.boolean().optional(), + paths: z.array(z.string()).optional(), +}); + export const createFilesystemRouter = () => { return router({ listDirectory: publicProcedure @@ -38,9 +160,11 @@ export const createFilesystemRouter = () => { }), ) .query(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - return await service.listDirectory({ - absolutePath: input.absolutePath, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.listDirectory({ + absolutePath: input.absolutePath, + }); }); }), @@ -55,22 +179,24 @@ export const createFilesystemRouter = () => { }), ) .query(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - const result = await service.readFile({ - absolutePath: input.absolutePath, - offset: input.offset, - maxBytes: input.maxBytes, - encoding: input.encoding, - }); + return await withFilesystemErrorBoundary(async () => { + const service = 
getServiceForWorkspace(input.workspaceId); + const result = await service.readFile({ + absolutePath: input.absolutePath, + offset: input.offset, + maxBytes: input.maxBytes, + encoding: input.encoding, + }); - if (result.kind === "bytes") { - return { - ...result, - content: Buffer.from(result.content).toString("base64"), - }; - } + if (result.kind === "bytes") { + return { + ...result, + content: Buffer.from(result.content).toString("base64"), + }; + } - return result; + return result; + }); }), getMetadata: publicProcedure @@ -81,9 +207,11 @@ export const createFilesystemRouter = () => { }), ) .query(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - return await service.getMetadata({ - absolutePath: input.absolutePath, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.getMetadata({ + absolutePath: input.absolutePath, + }); }); }), @@ -108,18 +236,20 @@ export const createFilesystemRouter = () => { }), ) .mutation(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - const content = - typeof input.content === "string" - ? input.content - : new Uint8Array(Buffer.from(input.content.data, "base64")); - - return await service.writeFile({ - absolutePath: input.absolutePath, - content, - encoding: input.encoding, - options: input.options, - precondition: input.precondition, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + const content = + typeof input.content === "string" + ? 
input.content + : new Uint8Array(Buffer.from(input.content.data, "base64")); + + return await service.writeFile({ + absolutePath: input.absolutePath, + content, + encoding: input.encoding, + options: input.options, + precondition: input.precondition, + }); }); }), @@ -132,10 +262,12 @@ export const createFilesystemRouter = () => { }), ) .mutation(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - return await service.createDirectory({ - absolutePath: input.absolutePath, - recursive: input.recursive, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.createDirectory({ + absolutePath: input.absolutePath, + recursive: input.recursive, + }); }); }), @@ -148,10 +280,12 @@ export const createFilesystemRouter = () => { }), ) .mutation(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - return await service.deletePath({ - absolutePath: input.absolutePath, - permanent: input.permanent, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.deletePath({ + absolutePath: input.absolutePath, + permanent: input.permanent, + }); }); }), @@ -164,10 +298,12 @@ export const createFilesystemRouter = () => { }), ) .mutation(async ({ input }) => { - const service = getServiceForWorkspace(input.workspaceId); - return await service.movePath({ - sourceAbsolutePath: input.sourceAbsolutePath, - destinationAbsolutePath: input.destinationAbsolutePath, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.movePath({ + sourceAbsolutePath: input.sourceAbsolutePath, + destinationAbsolutePath: input.destinationAbsolutePath, + }); }); }), @@ -180,10 +316,12 @@ export const createFilesystemRouter = () => { }), ) .mutation(async ({ input }) => { - const service = 
getServiceForWorkspace(input.workspaceId); - return await service.copyPath({ - sourceAbsolutePath: input.sourceAbsolutePath, - destinationAbsolutePath: input.destinationAbsolutePath, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.copyPath({ + sourceAbsolutePath: input.sourceAbsolutePath, + destinationAbsolutePath: input.destinationAbsolutePath, + }); }); }), @@ -196,6 +334,9 @@ export const createFilesystemRouter = () => { includePattern: z.string().optional(), excludePattern: z.string().optional(), limit: z.number().optional(), + openFilePaths: z.array(z.string()).optional(), + recentFilePaths: z.array(z.string()).optional(), + scopeId: z.string().optional(), }), ) .query(async ({ input }) => { @@ -204,40 +345,166 @@ export const createFilesystemRouter = () => { return { matches: [] }; } - const service = getServiceForWorkspace(input.workspaceId); - return await service.searchFiles({ - query: trimmedQuery, - includeHidden: input.includeHidden, - includePattern: input.includePattern, - excludePattern: input.excludePattern, - limit: input.limit, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.searchFiles({ + query: trimmedQuery, + includeHidden: input.includeHidden, + includePattern: input.includePattern, + excludePattern: input.excludePattern, + limit: input.limit, + openFilePaths: input.openFilePaths, + recentFilePaths: input.recentFilePaths, + scopeId: input.scopeId, + }); }); }), - searchContent: publicProcedure + warmupSearchIndex: publicProcedure .input( z.object({ workspaceId: z.string(), - query: z.string(), includeHidden: z.boolean().optional(), - includePattern: z.string().optional(), - excludePattern: z.string().optional(), - limit: z.number().optional(), }), ) + .mutation(async ({ input }) => { + return await withFilesystemErrorBoundary(async () => { + const service = 
getServiceForWorkspace(input.workspaceId); + return await service.warmupSearchIndex({ + includeHidden: input.includeHidden, + }); + }); + }), + + searchContent: publicProcedure + .input(searchContentInputSchema) .query(async ({ input }) => { const trimmedQuery = input.query.trim(); if (!trimmedQuery) { return { matches: [] }; } - const service = getServiceForWorkspace(input.workspaceId); - return await service.searchContent({ - query: trimmedQuery, - includeHidden: input.includeHidden, - includePattern: input.includePattern, - excludePattern: input.excludePattern, - limit: input.limit, + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.searchContent({ + query: trimmedQuery, + includeHidden: input.includeHidden, + includePattern: input.includePattern, + excludePattern: input.excludePattern, + limit: input.limit, + isRegex: input.isRegex, + caseSensitive: input.caseSensitive, + wholeWord: input.wholeWord, + multiline: input.multiline, + scopeId: input.scopeId, + }); + }); + }), + + replaceContent: publicProcedure + .input(replaceContentInputSchema) + .mutation(async ({ input }) => { + if (input.query.length === 0) { + return { + replacements: 0, + filesUpdated: 0, + updated: [], + conflicts: [], + failed: [], + }; + } + + return await withFilesystemErrorBoundary(async () => { + const service = getServiceForWorkspace(input.workspaceId); + return await service.replaceContent({ + query: input.query, + replacement: input.replacement, + includeHidden: input.includeHidden, + includePattern: input.includePattern, + excludePattern: input.excludePattern, + isRegex: input.isRegex, + caseSensitive: input.caseSensitive, + wholeWord: input.wholeWord, + multiline: input.multiline, + paths: input.paths, + }); + }); + }), + + searchContentStream: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + query: z.string(), + includeHidden: z.boolean().optional(), + includePattern: 
z.string().optional(), + excludePattern: z.string().optional(), + limit: z.number().optional(), + isRegex: z.boolean().optional(), + caseSensitive: z.boolean().optional(), + wholeWord: z.boolean().optional(), + multiline: z.boolean().optional(), + scopeId: z.string().optional(), + }), + ) + .subscription(({ input }) => { + return observable<{ match: FsContentMatch }>((emit) => { + const trimmed = input.query.trim(); + if (!trimmed) { + emit.complete(); + return () => {}; + } + + const service = getServiceForWorkspace(input.workspaceId); + let isDisposed = false; + const stream = service.searchContentStream({ + ...input, + query: trimmed, + }); + const iterator = stream[Symbol.asyncIterator](); + + const runCleanup = () => { + isDisposed = true; + void iterator.return?.().catch((error) => { + console.error( + "[filesystem/searchContentStream] Cleanup failed:", + { workspaceId: input.workspaceId, error }, + ); + }); + }; + + void (async () => { + try { + while (!isDisposed) { + const next = await iterator.next(); + if (next.done || isDisposed) { + if (!isDisposed) emit.complete(); + return; + } + try { + emit.next(next.value); + } catch (error) { + if (isClosedStreamError(error)) { + runCleanup(); + return; + } + throw error; + } + } + } catch (error) { + if (!isDisposed) { + try { + emit.error(error); + } catch { + // subscriber already gone; nothing else to do. 
+ } + } + } + })(); + + return () => { + runCleanup(); + }; }); }), diff --git a/apps/desktop/src/lib/trpc/routers/github-metrics.ts b/apps/desktop/src/lib/trpc/routers/github-metrics.ts new file mode 100644 index 00000000000..decdf87d24b --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/github-metrics.ts @@ -0,0 +1,19 @@ +import { publicProcedure, router } from ".."; +import { getGitHubMetricsSnapshot } from "./workspaces/utils/github/github-metrics"; +import { getGitHubRateLimitState } from "./workspaces/utils/github/github-rate-limiter"; +import { githubSyncService } from "./workspaces/utils/github/github-sync-service"; + +export const createGitHubMetricsRouter = () => { + return router({ + getSnapshot: publicProcedure.query(() => { + return { + generatedAt: Date.now(), + rateLimit: getGitHubRateLimitState(), + syncService: githubSyncService.getDebugSnapshot(), + metrics: getGitHubMetricsSnapshot(), + }; + }), + }); +}; + +export type GitHubMetricsRouter = ReturnType; diff --git a/apps/desktop/src/lib/trpc/routers/index.ts b/apps/desktop/src/lib/trpc/routers/index.ts index 03534fcf43e..334cfdca459 100644 --- a/apps/desktop/src/lib/trpc/routers/index.ts +++ b/apps/desktop/src/lib/trpc/routers/index.ts @@ -1,59 +1,96 @@ import type { BrowserWindow } from "electron"; +import type { WindowManager } from "main/lib/window-manager"; +// Fork-local: TODO autonomous agent feature. 
+import { createTodoAgentRouter } from "main/todo-agent/trpc-router"; import { router } from ".."; +import { createAgentCommandExecutionRouter } from "./agent-command-execution"; +import { createAivisRouter } from "./aivis"; import { createAnalyticsRouter } from "./analytics"; import { createAuthRouter } from "./auth"; import { createAutoUpdateRouter } from "./auto-update"; import { createBrowserRouter } from "./browser/browser"; +import { createBrowserAutomationRouter } from "./browser-automation"; import { createBrowserHistoryRouter } from "./browser-history"; +import { createBrowserPermissionsRouter } from "./browser-permissions"; import { createCacheRouter } from "./cache"; import { createChangesRouter } from "./changes"; import { createChatRuntimeServiceRouter } from "./chat-runtime-service"; import { createChatServiceRouter } from "./chat-service"; import { createConfigRouter } from "./config"; +import { createDatabasesRouter } from "./databases"; +import { createDiagnosticsRouter } from "./diagnostics"; +import { createDockerRouter } from "./docker"; +import { createExtensionsRouter } from "./extensions"; import { createExternalRouter } from "./external"; import { createFilesystemRouter } from "./filesystem"; +import { createGitHubMetricsRouter } from "./github-metrics"; import { createHostServiceCoordinatorRouter } from "./host-service-coordinator"; +import { createLanguageServicesRouter } from "./language-services"; import { createMenuRouter } from "./menu"; import { createMigrationRouter } from "./migration"; import { createNotificationsRouter } from "./notifications"; import { createPermissionsRouter } from "./permissions"; import { createPortsRouter } from "./ports"; import { createProjectsRouter } from "./projects"; +import { createReferenceGraphRouter } from "./reference-graph"; import { createResourceMetricsRouter } from "./resource-metrics"; import { createRingtoneRouter } from "./ringtone"; +import { createServiceStatusRouter } from 
"./service-status"; import { createSettingsRouter } from "./settings"; +import { createTabTearoffRouter } from "./tab-tearoff"; import { createTerminalRouter } from "./terminal"; import { createUiStateRouter } from "./ui-state"; +import { createVibrancyRouter } from "./vibrancy"; +import { createVscodeExtensionsRouter } from "./vscode-extensions"; import { createWindowRouter } from "./window"; import { createWorkspacesRouter } from "./workspaces"; -export const createAppRouter = (getWindow: () => BrowserWindow | null) => { +export const createAppRouter = ( + getWindow: () => BrowserWindow | null, + wm: WindowManager, +) => { return router({ chatRuntimeService: createChatRuntimeServiceRouter(), chatService: createChatServiceRouter(), + agentCommandExecution: createAgentCommandExecutionRouter(), + aivis: createAivisRouter(), analytics: createAnalyticsRouter(), browser: createBrowserRouter(), + browserAutomation: createBrowserAutomationRouter(), browserHistory: createBrowserHistoryRouter(), + browserPermissions: createBrowserPermissionsRouter(), auth: createAuthRouter(), autoUpdate: createAutoUpdateRouter(), cache: createCacheRouter(), - window: createWindowRouter(getWindow), + window: createWindowRouter(getWindow, wm), projects: createProjectsRouter(getWindow), workspaces: createWorkspacesRouter(), terminal: createTerminalRouter(), changes: createChangesRouter(), filesystem: createFilesystemRouter(), + githubMetrics: createGitHubMetricsRouter(), notifications: createNotificationsRouter(), permissions: createPermissionsRouter(), ports: createPortsRouter(), resourceMetrics: createResourceMetricsRouter(), menu: createMenuRouter(), + languageServices: createLanguageServicesRouter(), + referenceGraph: createReferenceGraphRouter(), external: createExternalRouter(), settings: createSettingsRouter(), config: createConfigRouter(), + databases: createDatabasesRouter(), + diagnostics: createDiagnosticsRouter(), + docker: createDockerRouter(), uiState: createUiStateRouter(), 
ringtone: createRingtoneRouter(getWindow), + serviceStatus: createServiceStatusRouter(), hostServiceCoordinator: createHostServiceCoordinatorRouter(), + tabTearoff: createTabTearoffRouter(wm), + extensions: createExtensionsRouter(getWindow), + vibrancy: createVibrancyRouter(wm), + vscodeExtensions: createVscodeExtensionsRouter(), + todoAgent: createTodoAgentRouter(), migration: createMigrationRouter(), }); }; diff --git a/apps/desktop/src/lib/trpc/routers/language-services/index.ts b/apps/desktop/src/lib/trpc/routers/language-services/index.ts new file mode 100644 index 00000000000..0ee6640045e --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/language-services/index.ts @@ -0,0 +1,211 @@ +import { TRPCError } from "@trpc/server"; +import { observable } from "@trpc/server/observable"; +import { languageServiceManager } from "main/lib/language-services/manager"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { getWorkspace } from "../workspaces/utils/db-helpers"; +import { getWorkspacePath } from "../workspaces/utils/worktree"; + +const languageServiceDocumentSchema = z.object({ + workspaceId: z.string(), + absolutePath: z.string(), + languageId: z.string(), + content: z.string(), + version: z.number().int().nonnegative(), +}); + +const languageServicePositionSchema = z.object({ + workspaceId: z.string(), + absolutePath: z.string(), + languageId: z.string(), + line: z.number().int().positive(), + column: z.number().int().positive(), + content: z.string().optional(), + version: z.number().int().nonnegative().optional(), +}); + +function resolveWorkspacePath(workspaceId: string): string { + const workspace = getWorkspace(workspaceId); + if (!workspace) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace ${workspaceId} not found`, + }); + } + + const workspacePath = getWorkspacePath(workspace); + if (!workspacePath) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: `Workspace ${workspaceId} has 
no filesystem path`, + }); + } + + return workspacePath; +} + +async function syncLookupDocumentIfNeeded( + input: z.infer, +): Promise { + const workspacePath = resolveWorkspacePath(input.workspaceId); + if (input.content === undefined || input.version === undefined) { + return workspacePath; + } + + await languageServiceManager.syncDocument({ + workspaceId: input.workspaceId, + workspacePath, + absolutePath: input.absolutePath, + languageId: input.languageId, + content: input.content, + version: input.version, + }); + return workspacePath; +} + +export const createLanguageServicesRouter = () => { + return router({ + openDocument: publicProcedure + .input(languageServiceDocumentSchema) + .mutation(async ({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + await languageServiceManager.openDocument({ + ...input, + workspacePath, + }); + return { ok: true }; + }), + + changeDocument: publicProcedure + .input(languageServiceDocumentSchema) + .mutation(async ({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + await languageServiceManager.syncDocument({ + ...input, + workspacePath, + }); + return { ok: true }; + }), + + getHover: publicProcedure + .input(languageServicePositionSchema) + .query(async ({ input }) => { + const workspacePath = await syncLookupDocumentIfNeeded(input); + return await languageServiceManager.getHover({ + workspaceId: input.workspaceId, + workspacePath, + absolutePath: input.absolutePath, + languageId: input.languageId, + line: input.line, + column: input.column, + }); + }), + + getDefinition: publicProcedure + .input(languageServicePositionSchema) + .query(async ({ input }) => { + const workspacePath = await syncLookupDocumentIfNeeded(input); + return await languageServiceManager.getDefinition({ + workspaceId: input.workspaceId, + workspacePath, + absolutePath: input.absolutePath, + languageId: input.languageId, + line: input.line, + column: input.column, + }); + }), + + 
closeDocument: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + absolutePath: z.string(), + languageId: z.string(), + }), + ) + .mutation(async ({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + await languageServiceManager.closeDocument({ + ...input, + workspacePath, + }); + return { ok: true }; + }), + + refreshWorkspace: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + }), + ) + .mutation(async ({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + await languageServiceManager.refreshWorkspace({ + workspaceId: input.workspaceId, + workspacePath, + }); + return { ok: true }; + }), + + getWorkspaceDiagnostics: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + }), + ) + .query(({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + return languageServiceManager.getWorkspaceSnapshot({ + workspaceId: input.workspaceId, + workspacePath, + }); + }), + + getProviders: publicProcedure.query(() => { + return languageServiceManager.getProviders(); + }), + + setProviderEnabled: publicProcedure + .input( + z.object({ + providerId: z.string(), + enabled: z.boolean(), + }), + ) + .mutation(async ({ input }) => { + const provider = await languageServiceManager.setProviderEnabled( + input.providerId, + input.enabled, + ); + if (!provider) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Language service provider ${input.providerId} not found`, + }); + } + + return provider; + }), + + subscribeDiagnostics: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + }), + ) + .subscription(({ input }) => { + return observable<{ version: number }>((emit) => { + const unsubscribe = languageServiceManager.subscribeToWorkspace( + input.workspaceId, + (payload) => { + emit.next(payload); + }, + ); + + return () => { + unsubscribe(); + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/menu.ts 
b/apps/desktop/src/lib/trpc/routers/menu.ts index 7310d5f6a12..d775038d681 100644 --- a/apps/desktop/src/lib/trpc/routers/menu.ts +++ b/apps/desktop/src/lib/trpc/routers/menu.ts @@ -1,15 +1,18 @@ import { observable } from "@trpc/server/observable"; import { + type BrowserActionEvent, menuEmitter, type OpenSettingsEvent, type OpenWorkspaceEvent, type SettingsSection, } from "main/lib/menu-events"; +import type { BrowserShortcutAction } from "shared/browser-shortcuts"; import { publicProcedure, router } from ".."; type MenuEvent = | { type: "open-settings"; data: OpenSettingsEvent } - | { type: "open-workspace"; data: OpenWorkspaceEvent }; + | { type: "open-workspace"; data: OpenWorkspaceEvent } + | { type: "browser-action"; data: BrowserActionEvent }; export const createMenuRouter = () => { return router({ @@ -23,12 +26,18 @@ export const createMenuRouter = () => { emit.next({ type: "open-workspace", data: { workspaceId } }); }; + const onBrowserAction = (action: BrowserShortcutAction) => { + emit.next({ type: "browser-action", data: { action } }); + }; + menuEmitter.on("open-settings", onOpenSettings); menuEmitter.on("open-workspace", onOpenWorkspace); + menuEmitter.on("browser-action", onBrowserAction); return () => { menuEmitter.off("open-settings", onOpenSettings); menuEmitter.off("open-workspace", onOpenWorkspace); + menuEmitter.off("browser-action", onBrowserAction); }; }); }), diff --git a/apps/desktop/src/lib/trpc/routers/permissions.ts b/apps/desktop/src/lib/trpc/routers/permissions.ts index 16b9d7d4bc3..e747b327637 100644 --- a/apps/desktop/src/lib/trpc/routers/permissions.ts +++ b/apps/desktop/src/lib/trpc/routers/permissions.ts @@ -2,6 +2,7 @@ import fs from "node:fs"; import { homedir } from "node:os"; import path from "node:path"; import { shell, systemPreferences } from "electron"; +import { requestMediaAccess } from "lib/electron/request-media-access"; import { publicProcedure, router } from ".."; function checkFullDiskAccess(): boolean { @@ -30,6 
+31,14 @@ function checkMicrophone(): boolean { } } +function checkCamera(): boolean { + try { + return systemPreferences.getMediaAccessStatus("camera") === "granted"; + } catch { + return false; + } +} + export const createPermissionsRouter = () => { return router({ getStatus: publicProcedure.query(() => { @@ -37,6 +46,7 @@ export const createPermissionsRouter = () => { fullDiskAccess: checkFullDiskAccess(), accessibility: checkAccessibility(), microphone: checkMicrophone(), + camera: checkCamera(), }; }), @@ -53,22 +63,11 @@ export const createPermissionsRouter = () => { }), requestMicrophone: publicProcedure.mutation(async () => { - try { - if (process.platform === "darwin") { - const granted = - await systemPreferences.askForMediaAccess("microphone"); - if (granted) { - return { granted: true }; - } - } - } catch { - // Fall through to opening System Settings. - } + return requestMediaAccess("microphone"); + }), - await shell.openExternal( - "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone", - ); - return { granted: false }; + requestCamera: publicProcedure.mutation(async () => { + return requestMediaAccess("camera"); }), requestAppleEvents: publicProcedure.mutation(async () => { diff --git a/apps/desktop/src/lib/trpc/routers/ports/ports.ts b/apps/desktop/src/lib/trpc/routers/ports/ports.ts index d4432be08a9..3f7b50e0072 100644 --- a/apps/desktop/src/lib/trpc/routers/ports/ports.ts +++ b/apps/desktop/src/lib/trpc/routers/ports/ports.ts @@ -1,6 +1,5 @@ import { workspaces } from "@superset/local-db"; import { observable } from "@trpc/server/observable"; -import { eq } from "drizzle-orm"; import { localDb } from "main/lib/local-db"; import { loadStaticPorts } from "main/lib/static-ports"; import { portManager } from "main/lib/terminal/port-manager"; @@ -24,30 +23,80 @@ function getLabelsForPath(worktreePath: string): Map | null { return labels; } +/** Cache structure for workspace path + labels lookup. 
*/ +interface WorkspaceLabelInfo { + labels: Map | null; + workspaceId: string; +} + +function buildLabelCache(): Map { + const cache = new Map(); + const allWs = localDb.select().from(workspaces).all(); + + for (const ws of allWs) { + const wsPath = getWorkspacePath(ws); + if (!wsPath) continue; + const labels = getLabelsForPath(wsPath); + if (labels) { + cache.set(ws.id, { labels, workspaceId: ws.id }); + } + } + + return cache; +} + export const createPortsRouter = () => { return router({ getAll: publicProcedure.query((): EnrichedPort[] => { const detectedPorts = portManager.getAllPorts(); + const labelCache = buildLabelCache(); - const labelCache = new Map | null>(); - - return detectedPorts.map((port) => { - if (!labelCache.has(port.workspaceId)) { - const ws = localDb - .select() - .from(workspaces) - .where(eq(workspaces.id, port.workspaceId)) - .get(); - const wsPath = ws ? getWorkspacePath(ws) : null; - labelCache.set( - port.workspaceId, - wsPath ? getLabelsForPath(wsPath) : null, - ); - } + // Track which static ports have been matched with detected ports + // key: "workspaceId:port" + const matchedStaticPorts = new Set(); - const labels = labelCache.get(port.workspaceId); - return { ...port, label: labels?.get(port.port) ?? null }; + // Enrich detected ports with labels + const enriched: EnrichedPort[] = detectedPorts.map((port) => { + const info = labelCache.get(port.workspaceId); + const label = info?.labels?.get(port.port) ?? 
null; + if (label != null) { + matchedStaticPorts.add(`${port.workspaceId}:${port.port}`); + } + return { + port: port.port, + workspaceId: port.workspaceId, + label, + detected: true, + pid: port.pid, + processName: port.processName, + paneId: port.paneId, + detectedAt: port.detectedAt, + address: port.address, + }; }); + + // Add static ports that were NOT detected + for (const [wsId, info] of labelCache) { + if (!info.labels) continue; + for (const [portNum, label] of info.labels) { + const key = `${wsId}:${portNum}`; + if (matchedStaticPorts.has(key)) continue; + + enriched.push({ + port: portNum, + workspaceId: wsId, + label, + detected: false, + pid: null, + processName: null, + paneId: null, + detectedAt: null, + address: null, + }); + } + } + + return enriched; }), subscribe: publicProcedure.subscription(() => { diff --git a/apps/desktop/src/lib/trpc/routers/projects/projects.ts b/apps/desktop/src/lib/trpc/routers/projects/projects.ts index 169fa8cc7d4..7e233440f6d 100644 --- a/apps/desktop/src/lib/trpc/routers/projects/projects.ts +++ b/apps/desktop/src/lib/trpc/routers/projects/projects.ts @@ -1,3 +1,4 @@ +import { EventEmitter } from "node:events"; import { existsSync, statSync } from "node:fs"; import { access, mkdir, rm } from "node:fs/promises"; import { basename, join } from "node:path"; @@ -12,6 +13,7 @@ import { worktrees, } from "@superset/local-db"; import { TRPCError } from "@trpc/server"; +import { observable } from "@trpc/server/observable"; import { and, desc, eq, inArray, isNotNull, isNull, not } from "drizzle-orm"; import type { BrowserWindow } from "electron"; import { dialog } from "electron"; @@ -23,6 +25,7 @@ import { } from "main/lib/project-icons"; import { getWorkspaceRuntimeRegistry } from "main/lib/workspace-runtime"; import { PROJECT_COLOR_VALUES } from "shared/constants/project-colors"; +import type { SimpleGitProgressEvent } from "simple-git"; import { z } from "zod"; import { publicProcedure, router } from "../.."; import { 
resolveDefaultEditor } from "../external"; @@ -42,7 +45,10 @@ import { refreshDefaultBranch, sanitizeAuthorPrefix, } from "../workspaces/utils/git"; -import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; +import { + createSimpleGitWithShellPath, + getSimpleGitWithShellPath, +} from "../workspaces/utils/git-client"; import { execWithShellEnv } from "../workspaces/utils/shell-env"; import { getDefaultProjectColor } from "./utils/colors"; import { discoverAndSaveProjectIcon } from "./utils/favicon-discovery"; @@ -171,6 +177,28 @@ function upsertProject(mainRepoPath: string, defaultBranch: string): Project { return project; } +async function ensureProjectGitHubOwner(project: Project): Promise { + if (project.githubOwner) { + return project; + } + + const githubOwner = await fetchGitHubOwner(project.mainRepoPath); + if (!githubOwner) { + return project; + } + + localDb + .update(projects) + .set({ githubOwner }) + .where(eq(projects.id, project.id)) + .run(); + + return { + ...project, + githubOwner, + }; +} + async function ensureMainWorkspace(project: Project): Promise { const existingBranchWorkspace = getBranchWorkspace(project.id); @@ -301,6 +329,119 @@ function extractRepoName(urlInput: string): string | null { return repoSegment; } +interface CloneEventBase { + cloneId: string; + /** Monotonic sequence number per cloneId, used by subscribers to dedupe. */ + seq: number; + time: number; +} + +export type CloneProgressEvent = + | (CloneEventBase & { + type: "log"; + message: string; + level: "info" | "warn" | "error"; + }) + | (CloneEventBase & { + type: "progress"; + stage: string; + progress: number; + processed: number; + total: number; + }) + | (CloneEventBase & { type: "done" }) + | (CloneEventBase & { type: "error"; message: string }) + | (CloneEventBase & { type: "canceled" }); + +const cloneEventBus = new EventEmitter(); +cloneEventBus.setMaxListeners(0); +const cloneAbortControllers = new Map(); + +/** + * Per-cloneId replay buffer. 
The tRPC subscription is established after the + * mutation is fired from the client, so events emitted in the window between + * `cloneRepo` starting and the subscription connecting would otherwise be + * lost. The buffer is flushed to the first subscriber and trimmed on a short + * timeout after any terminal event (done / error / canceled). + */ +const cloneEventBuffers = new Map(); +const cloneBufferEvictTimers = new Map(); +const cloneSeqCounters = new Map(); +const MAX_BUFFERED_EVENTS = 1000; +const TERMINAL_BUFFER_EVICT_MS = 30_000; + +function isTerminalCloneEvent(event: CloneProgressEvent): boolean { + return ( + event.type === "done" || event.type === "error" || event.type === "canceled" + ); +} + +function nextCloneSeq(cloneId: string): number { + const next = (cloneSeqCounters.get(cloneId) ?? 0) + 1; + cloneSeqCounters.set(cloneId, next); + return next; +} + +// Distributive omit preserves the discriminated-union shape so callers can +// still pass type-specific fields (`message`, `stage`, …) without TS +// collapsing everything to the common intersection. +type DistributiveOmit = T extends unknown + ? 
Omit + : never; +type CloneEventInput = DistributiveOmit; + +function emitCloneEvent(input: CloneEventInput) { + const event = { + ...input, + seq: nextCloneSeq(input.cloneId), + } as CloneProgressEvent; + let buffer = cloneEventBuffers.get(event.cloneId); + if (!buffer) { + buffer = []; + cloneEventBuffers.set(event.cloneId, buffer); + } + buffer.push(event); + if (buffer.length > MAX_BUFFERED_EVENTS) { + buffer.splice(0, buffer.length - MAX_BUFFERED_EVENTS); + } + cloneEventBus.emit(event.cloneId, event); + + if (isTerminalCloneEvent(event)) { + const existing = cloneBufferEvictTimers.get(event.cloneId); + if (existing) clearTimeout(existing); + const timer = setTimeout(() => { + cloneEventBuffers.delete(event.cloneId); + cloneBufferEvictTimers.delete(event.cloneId); + cloneSeqCounters.delete(event.cloneId); + }, TERMINAL_BUFFER_EVICT_MS); + cloneBufferEvictTimers.set(event.cloneId, timer); + } +} + +function emitCloneLog( + cloneId: string, + message: string, + level: "info" | "warn" | "error" = "info", +) { + emitCloneEvent({ + type: "log", + cloneId, + message, + level, + time: Date.now(), + }); +} + +/** + * Strip `userinfo` (credentials embedded in URLs such as + * `https://token@host/...` or `https://user:pass@host/...`) so that PATs and + * basic-auth tokens never reach the renderer via progress logs or error + * messages. Applied to every string emitted through the clone event bus. + */ +function redactGitCredentials(value: string): string { + return value.replace(/\/\/([^/\s@]+)(?::[^/\s@]*)?@/g, "//***@"); +} + /** Create the tRPC router for project CRUD, branch listing, and git operations. 
*/ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { return router({ @@ -1070,7 +1211,9 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { const mainRepoPath = await getGitRoot(selectedPath); const defaultBranch = await getDefaultBranch(mainRepoPath); - const project = upsertProject(mainRepoPath, defaultBranch); + const project = await ensureProjectGitHubOwner( + upsertProject(mainRepoPath, defaultBranch), + ); await ensureMainWorkspace(project); track("project_opened", { @@ -1142,7 +1285,9 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { const defaultBranch = await getDefaultBranch(mainRepoPath); - const project = upsertProject(mainRepoPath, defaultBranch); + const project = await ensureProjectGitHubOwner( + upsertProject(mainRepoPath, defaultBranch), + ); await ensureMainWorkspace(project); track("project_opened", { @@ -1161,7 +1306,9 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { .mutation(async ({ input }) => { const { defaultBranch } = await initGitRepo(input.path); - const project = upsertProject(input.path, defaultBranch); + const project = await ensureProjectGitHubOwner( + upsertProject(input.path, defaultBranch), + ); await ensureMainWorkspace(project); track("project_opened", { @@ -1172,6 +1319,53 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { return { project }; }), + cloneProgress: publicProcedure + .input(z.object({ cloneId: z.string().min(1) })) + .subscription(({ input }) => { + return observable((emit) => { + // Dedupe by monotonic seq so that we can safely attach the live + // listener first and then replay the buffer: any event that + // reaches both paths only passes the `> lastSeq` guard once. 
+ let lastSeq = 0; + const deliver = (event: CloneProgressEvent) => { + if (event.seq <= lastSeq) return; + lastSeq = event.seq; + emit.next(event); + }; + const handler = (event: CloneProgressEvent) => { + deliver(event); + }; + cloneEventBus.on(input.cloneId, handler); + const buffered = cloneEventBuffers.get(input.cloneId); + if (buffered) { + for (const event of buffered) { + deliver(event); + } + } + return () => { + cloneEventBus.off(input.cloneId, handler); + }; + }); + }), + + cancelClone: publicProcedure + .input(z.object({ cloneId: z.string().min(1) })) + .mutation(({ input }) => { + const controller = cloneAbortControllers.get(input.cloneId); + if (!controller) { + return { canceled: false as const }; + } + controller.abort(); + cloneAbortControllers.delete(input.cloneId); + emitCloneLog(input.cloneId, "Clone canceled by user", "warn"); + emitCloneEvent({ + type: "canceled", + cloneId: input.cloneId, + time: Date.now(), + }); + return { canceled: true as const }; + }), + cloneRepo: publicProcedure .input( z.object({ @@ -1195,6 +1389,7 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { .trim() .optional() .transform((v) => (v && v.length > 0 ? 
v : undefined)), + cloneId: z.string().min(1).optional(), }), ) .mutation(async ({ input }) => { @@ -1252,12 +1447,14 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { .where(eq(projects.id, existingProject.id)) .run(); - // Auto-create main workspace if it doesn't exist - await ensureMainWorkspace({ + const hydratedProject = await ensureProjectGitHubOwner({ ...existingProject, lastOpenedAt: Date.now(), }); + // Auto-create main workspace if it doesn't exist + await ensureMainWorkspace(hydratedProject); + track("project_opened", { project_id: existingProject.id, method: "clone", @@ -1266,7 +1463,7 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { return { canceled: false as const, success: true as const, - project: { ...existingProject, lastOpenedAt: Date.now() }, + project: hydratedProject, }; } catch { // Directory is missing - remove the stale project record and continue with clone @@ -1286,23 +1483,87 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { }; } - // Clone the repository - const git = await getSimpleGitWithShellPath(); - await git.clone(input.url, clonePath); + // Clone the repository (with streaming progress when cloneId given) + const cloneId = input.cloneId; + if (cloneId) { + const abortController = new AbortController(); + cloneAbortControllers.set(cloneId, abortController); + emitCloneLog( + cloneId, + `Preparing clone into ${basename(clonePath)}`, + ); + try { + const gitWithProgress = await createSimpleGitWithShellPath({ + abort: abortController.signal, + progress: (event: SimpleGitProgressEvent) => { + emitCloneEvent({ + type: "progress", + cloneId, + stage: event.stage, + progress: event.progress, + processed: event.processed, + total: event.total, + time: Date.now(), + }); + }, + }); + emitCloneLog( + cloneId, + `Cloning ${redactGitCredentials(input.url)}`, + ); + await gitWithProgress.clone(input.url, clonePath); + emitCloneLog(cloneId, 
"Clone finished, preparing project"); + } catch (cloneError) { + const message = redactGitCredentials( + cloneError instanceof Error + ? cloneError.message + : String(cloneError), + ); + // `git clone` creates the destination directory eagerly; + // leaving a partial checkout behind would block every retry + // against the same path via the existing-folder guard above. + await rm(clonePath, { recursive: true, force: true }).catch( + () => undefined, + ); + if (!abortController.signal.aborted) { + emitCloneEvent({ + type: "error", + cloneId, + message, + time: Date.now(), + }); + } + throw cloneError; + } finally { + cloneAbortControllers.delete(cloneId); + } + } else { + const git = await getSimpleGitWithShellPath(); + try { + await git.clone(input.url, clonePath); + } catch (cloneError) { + await rm(clonePath, { recursive: true, force: true }).catch( + () => undefined, + ); + throw cloneError; + } + } // Create new project const name = basename(clonePath); const defaultBranch = await getDefaultBranch(clonePath); - const project = localDb - .insert(projects) - .values({ - mainRepoPath: clonePath, - name, - color: getDefaultProjectColor(), - defaultBranch, - }) - .returning() - .get(); + const project = await ensureProjectGitHubOwner( + localDb + .insert(projects) + .values({ + mainRepoPath: clonePath, + name, + color: getDefaultProjectColor(), + defaultBranch, + }) + .returning() + .get(), + ); // Auto-create main workspace if it doesn't exist await ensureMainWorkspace(project); @@ -1312,14 +1573,43 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { method: "clone", }); + if (input.cloneId) { + emitCloneEvent({ + type: "done", + cloneId: input.cloneId, + time: Date.now(), + }); + } + return { canceled: false as const, success: true as const, project, }; } catch (error) { - const errorMessage = - error instanceof Error ? error.message : String(error); + const errorMessage = redactGitCredentials( + error instanceof Error ? 
error.message : String(error), + ); + // Surface post-clone failures (getDefaultBranch / DB insert / + // ensureMainWorkspace / etc) to any streaming subscriber, unless + // the git clone step itself already emitted an error event. + if (input.cloneId) { + const buffered = cloneEventBuffers.get(input.cloneId); + const hasTerminal = buffered?.some( + (event) => + event.type === "error" || + event.type === "canceled" || + event.type === "done", + ); + if (!hasTerminal) { + emitCloneEvent({ + type: "error", + cloneId: input.cloneId, + message: errorMessage, + time: Date.now(), + }); + } + } return { canceled: false as const, success: false as const, @@ -1365,7 +1655,9 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { await rm(repoPath, { recursive: true, force: true }); throw gitErr; } - const project = upsertProject(repoPath, defaultBranch); + const project = await ensureProjectGitHubOwner( + upsertProject(repoPath, defaultBranch), + ); await ensureMainWorkspace(project); track("project_opened", { @@ -1408,6 +1700,8 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { worktreeBaseDir: z.string().nullable().optional(), hideImage: z.boolean().optional(), defaultApp: z.enum(EXTERNAL_APPS).nullable().optional(), + autoImportExternalWorktrees: z.boolean().nullable().optional(), + autoRemoveMissingWorktrees: z.boolean().nullable().optional(), }), }), ) @@ -1446,6 +1740,14 @@ export const createProjectsRouter = (getWindow: () => BrowserWindow | null) => { ...(input.patch.defaultApp !== undefined && { defaultApp: input.patch.defaultApp, }), + ...(input.patch.autoImportExternalWorktrees !== undefined && { + autoImportExternalWorktrees: + input.patch.autoImportExternalWorktrees, + }), + ...(input.patch.autoRemoveMissingWorktrees !== undefined && { + autoRemoveMissingWorktrees: + input.patch.autoRemoveMissingWorktrees, + }), lastOpenedAt: Date.now(), }) .where(eq(projects.id, input.id)) diff --git 
a/apps/desktop/src/lib/trpc/routers/projects/utils/github.ts b/apps/desktop/src/lib/trpc/routers/projects/utils/github.ts index 506edc50ec0..253306ae0aa 100644 --- a/apps/desktop/src/lib/trpc/routers/projects/utils/github.ts +++ b/apps/desktop/src/lib/trpc/routers/projects/utils/github.ts @@ -1,5 +1,24 @@ +import { execGitWithShellPath } from "../../workspaces/utils/git-client"; import { execWithShellEnv } from "../../workspaces/utils/shell-env"; +function parseGitHubOwnerFromRemoteUrl(remoteUrl: string): string | null { + const trimmed = remoteUrl.trim(); + const patterns = [ + /^git@github\.com:(?[^/]+)\/[^/]+?(?:\.git)?$/, + /^ssh:\/\/git@github\.com\/(?[^/]+)\/[^/]+?(?:\.git)?$/, + /^https:\/\/github\.com\/(?[^/]+)\/[^/]+?(?:\.git)?\/?$/, + ]; + + for (const pattern of patterns) { + const match = pattern.exec(trimmed); + if (match?.groups?.owner) { + return match.groups.owner; + } + } + + return null; +} + /** * Fetches the GitHub owner (user or org) for a repository using the `gh` CLI. * Returns null if `gh` is not installed, not authenticated, or on error. @@ -7,6 +26,21 @@ import { execWithShellEnv } from "../../workspaces/utils/shell-env"; export async function fetchGitHubOwner( repoPath: string, ): Promise { + try { + const { stdout } = await execGitWithShellPath( + ["remote", "get-url", "origin"], + { + cwd: repoPath, + }, + ); + const owner = parseGitHubOwnerFromRemoteUrl(stdout); + if (owner) { + return owner; + } + } catch { + // Fall back to gh when no origin remote exists or the remote is not GitHub. 
+ } + try { const { stdout } = await execWithShellEnv( "gh", diff --git a/apps/desktop/src/lib/trpc/routers/reference-graph/index.ts b/apps/desktop/src/lib/trpc/routers/reference-graph/index.ts new file mode 100644 index 00000000000..4fc5376d5ab --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/reference-graph/index.ts @@ -0,0 +1,74 @@ +import path from "node:path"; +import { TRPCError } from "@trpc/server"; +import { buildReferenceGraph } from "main/lib/reference-graph"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { getWorkspace } from "../workspaces/utils/db-helpers"; +import { getWorkspacePath } from "../workspaces/utils/worktree"; + +function resolveWorkspacePath(workspaceId: string): string { + const workspace = getWorkspace(workspaceId); + if (!workspace) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace ${workspaceId} not found`, + }); + } + + const workspacePath = getWorkspacePath(workspace); + if (!workspacePath) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: `Workspace ${workspaceId} has no filesystem path`, + }); + } + + return workspacePath; +} + +export const createReferenceGraphRouter = () => { + return router({ + buildGraph: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + absolutePath: z.string(), + languageId: z.string(), + line: z.number().int().positive(), + column: z.number().int().positive(), + maxDepth: z.number().int().min(1).max(10).optional(), + maxNodes: z.number().int().min(1).max(500).optional(), + excludePatterns: z.array(z.string()).optional(), + }), + ) + .mutation(async ({ input }) => { + const workspacePath = resolveWorkspacePath(input.workspaceId); + + // Ensure absolutePath is within the workspace (prevent path traversal) + const resolved = path.resolve(input.absolutePath); + if ( + !resolved.startsWith(workspacePath + path.sep) && + resolved !== workspacePath + ) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: 
"absolutePath must be within the workspace", + }); + } + + const graph = await buildReferenceGraph({ + workspaceId: input.workspaceId, + workspacePath, + absolutePath: input.absolutePath, + languageId: input.languageId, + line: input.line, + column: input.column, + maxDepth: input.maxDepth, + maxNodes: input.maxNodes, + excludePatterns: input.excludePatterns, + }); + + return graph; + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/ringtone/index.ts b/apps/desktop/src/lib/trpc/routers/ringtone/index.ts index 699cba29806..e84cf44d069 100644 --- a/apps/desktop/src/lib/trpc/routers/ringtone/index.ts +++ b/apps/desktop/src/lib/trpc/routers/ringtone/index.ts @@ -1,14 +1,36 @@ import type { ChildProcess } from "node:child_process"; +import { randomUUID } from "node:crypto"; import { TRPCError } from "@trpc/server"; +import { observable } from "@trpc/server/observable"; import type { BrowserWindow, OpenDialogOptions } from "electron"; import { dialog } from "electron"; import { + deleteCustomRingtone, + getCustomRingtoneEditState, getCustomRingtoneInfo, getCustomRingtonePath, + getCustomRingtoneSourcePath, importCustomRingtoneFromPath, + setCustomRingtoneDisplayName, } from "main/lib/custom-ringtones"; import { playSoundFile } from "main/lib/play-sound"; import { getSoundPath } from "main/lib/sound-paths"; +import { + getTempAudioPath, + registerTempAudio, + unregisterTempAudio, +} from "main/lib/temp-audio-protocol"; +import { + checkMissingBinaries, + cleanupTempAudio, + downloadFullYouTubeAudio, + getBufferedInstallEvents, + type InstallProgressEvent, + importRingtoneFromYouTube, + installMissingBinaries, + subscribeInstallEvents, + YouTubeRingtoneError, +} from "main/lib/youtube-ringtone"; import { CUSTOM_RINGTONE_ID, getRingtoneFilename, @@ -170,6 +192,294 @@ export const createRingtoneRouter = (getWindow: () => BrowserWindow | null) => { }); } }), + + /** + * Deletes the imported custom ringtone (audio file + metadata). 
+ */ + deleteCustom: publicProcedure.mutation(() => { + stopCurrentSound(); + deleteCustomRingtone(); + return { success: true as const }; + }), + + /** + * Renames the custom ringtone's display name. + */ + renameCustom: publicProcedure + .input(z.object({ name: z.string().min(1).max(80) })) + .mutation(({ input }) => { + try { + setCustomRingtoneDisplayName(input.name); + const info = getCustomRingtoneInfo(); + if (!info) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "No custom ringtone to rename.", + }); + } + return { ringtone: info }; + } catch (error) { + if (error instanceof TRPCError) throw error; + throw new TRPCError({ + code: "BAD_REQUEST", + message: + error instanceof Error + ? error.message + : "Failed to rename custom ringtone", + }); + } + }), + + /** + * Check which required binaries (yt-dlp, ffmpeg) are missing. + */ + checkBinaries: publicProcedure.query(async () => { + const missing = await checkMissingBinaries(); + return { missing }; + }), + + /** + * Install yt-dlp and ffmpeg via Homebrew (macOS only). + * Log events are streamed via `installProgress` subscription keyed on installId. + */ + installBinaries: publicProcedure + .input(z.object({ installId: z.string().min(1) })) + .mutation(async ({ input }) => { + try { + await installMissingBinaries(input.installId); + return { success: true as const }; + } catch (error) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: + error instanceof Error + ? error.message + : "Failed to install dependencies", + }); + } + }), + + /** + * Subscribe to install progress events for a given installId. + * Emits buffered events on connect (replay) plus live events. 
+ */ + installProgress: publicProcedure + .input(z.object({ installId: z.string().min(1) })) + .subscription(({ input }) => { + return observable((emit) => { + let lastSeq = 0; + const deliver = (event: InstallProgressEvent) => { + if (event.seq <= lastSeq) return; + lastSeq = event.seq; + emit.next(event); + }; + const unsubscribe = subscribeInstallEvents(input.installId, deliver); + for (const event of getBufferedInstallEvents(input.installId)) { + deliver(event); + } + return () => { + unsubscribe(); + }; + }); + }), + + /** + * Download the full audio from a YouTube URL to a temp file. + * Returns a tempId for use with the superset-temp-audio protocol and video metadata. + */ + downloadYouTubeAudio: publicProcedure + .input(z.object({ url: z.string().min(1) })) + .mutation(async ({ input }) => { + try { + const result = await downloadFullYouTubeAudio(input.url); + return { + tempId: result.tempId, + info: result.info, + }; + } catch (error) { + if (error instanceof YouTubeRingtoneError) { + throw new TRPCError({ + code: + error.code === "BINARY_MISSING" || + error.code === "TIMEOUT" || + error.code === "VIDEO_TOO_LONG" + ? "PRECONDITION_FAILED" + : "BAD_REQUEST", + message: error.message, + }); + } + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: + error instanceof Error + ? error.message + : "Failed to download YouTube audio", + }); + } + }), + + /** + * Clean up a previously downloaded temp audio file. + */ + cleanupTempAudio: publicProcedure + .input(z.object({ tempId: z.string() })) + .mutation(async ({ input }) => { + await cleanupTempAudio(input.tempId); + return { success: true as const }; + }), + + /** + * Returns the saved edit parameters for the current custom ringtone, + * or null if the ringtone was not produced by the clip editor. 
+ */ + getCustomEditState: publicProcedure.query(() => { + return getCustomRingtoneEditState(); + }), + + /** + * Registers the saved source audio with the temp-audio protocol so + * the clip editor can stream it for preview & waveform. Returns + * `null` if no source is available (user can only re-import fresh). + */ + openCustomSource: publicProcedure.mutation(() => { + const sourcePath = getCustomRingtoneSourcePath(); + if (!sourcePath) { + return { tempId: null as string | null }; + } + const tempId = randomUUID(); + registerTempAudio(tempId, sourcePath); + return { tempId }; + }), + + /** + * Release the temp-audio registration returned by `openCustomSource`. + * Does NOT delete the persisted source file. + */ + closeCustomSource: publicProcedure + .input(z.object({ tempId: z.string() })) + .mutation(({ input }) => { + unregisterTempAudio(input.tempId); + return { success: true as const }; + }), + + /** + * Re-produce the custom ringtone by re-running the ffmpeg clip + * pipeline on the saved source audio with new parameters. + */ + reEditCustom: publicProcedure + .input( + z.object({ + startSeconds: z + .number() + .min(0) + .max(60 * 60 * 12), + endSeconds: z + .number() + .min(0) + .max(60 * 60 * 12), + displayName: z.string().max(120).optional(), + fadeInSeconds: z.number().min(0).max(10).optional(), + fadeOutSeconds: z.number().min(0).max(10).optional(), + playbackRate: z.number().min(0.5).max(2.0).optional(), + }), + ) + .mutation(async ({ input }) => { + const sourcePath = getCustomRingtoneSourcePath(); + if (!sourcePath) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "This ringtone has no saved source audio. Re-import from YouTube to enable editing.", + }); + } + const existingInfo = getCustomRingtoneInfo(); + const existingEditState = getCustomRingtoneEditState(); + try { + const ringtone = await importRingtoneFromYouTube({ + url: existingEditState?.sourceUrl ?? 
"", + startSeconds: input.startSeconds, + endSeconds: input.endSeconds, + displayName: input.displayName, + thumbnailUrl: existingInfo?.thumbnailUrl, + fadeInSeconds: input.fadeInSeconds, + fadeOutSeconds: input.fadeOutSeconds, + playbackRate: input.playbackRate, + tempFilePath: sourcePath, + sourceTitle: existingEditState?.sourceTitle, + }); + return { ringtone }; + } catch (error) { + if (error instanceof YouTubeRingtoneError) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: error.message, + }); + } + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: + error instanceof Error + ? error.message + : "Failed to re-edit custom ringtone", + }); + } + }), + + /** + * Imports a custom ringtone by clipping a section of a YouTube video. + * Requires `yt-dlp` and `ffmpeg` to be installed on the user's machine. + */ + importFromYouTube: publicProcedure + .input( + z.object({ + url: z.string().min(1), + startSeconds: z + .number() + .min(0) + .max(60 * 60 * 12), + endSeconds: z + .number() + .min(0) + .max(60 * 60 * 12), + displayName: z.string().max(120).optional(), + thumbnailUrl: z.string().max(2048).optional(), + fadeInSeconds: z.number().min(0).max(10).optional(), + fadeOutSeconds: z.number().min(0).max(10).optional(), + playbackRate: z.number().min(0.5).max(2.0).optional(), + /** Client-side tempId from downloadYouTubeAudio – resolved to path server-side */ + tempId: z.string().optional(), + sourceTitle: z.string().max(400).optional(), + }), + ) + .mutation(async ({ input }) => { + try { + const tempFilePath = input.tempId + ? (getTempAudioPath(input.tempId) ?? undefined) + : undefined; + const ringtone = await importRingtoneFromYouTube({ + ...input, + tempFilePath, + }); + return { ringtone }; + } catch (error) { + if (error instanceof YouTubeRingtoneError) { + throw new TRPCError({ + code: + error.code === "BINARY_MISSING" || error.code === "TIMEOUT" + ? 
"PRECONDITION_FAILED" + : "BAD_REQUEST", + message: error.message, + }); + } + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: + error instanceof Error + ? error.message + : "Failed to import YouTube ringtone", + }); + } + }), }); }; diff --git a/apps/desktop/src/lib/trpc/routers/service-status.ts b/apps/desktop/src/lib/trpc/routers/service-status.ts new file mode 100644 index 00000000000..442b3253038 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/service-status.ts @@ -0,0 +1,30 @@ +import { observable } from "@trpc/server/observable"; +import { serviceStatusService } from "main/lib/service-status"; +import type { ServiceStatusSnapshot } from "shared/service-status-types"; +import { publicProcedure, router } from ".."; + +export const createServiceStatusRouter = () => { + return router({ + // No `getAll` query: the subscription emits the current state for every + // snapshot on connect, so the client gets the initial value without a + // separate round-trip (and we avoid the staleTime-Infinity / subscription + // race where the query would later clobber fresh subscription data). + onChange: publicProcedure.subscription(() => { + return observable((emit) => { + // Register the listener BEFORE emitting the initial snapshots so + // that a `change` fired between the two steps (e.g. a polling + // cycle completing while we iterate) isn't lost. 
+ const onChange = (snapshot: ServiceStatusSnapshot) => { + emit.next(snapshot); + }; + serviceStatusService.on("change", onChange); + for (const snapshot of serviceStatusService.getAll()) { + emit.next(snapshot); + } + return () => { + serviceStatusService.off("change", onChange); + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/settings/index.ts b/apps/desktop/src/lib/trpc/routers/settings/index.ts index 7a871239483..87ed5c6b222 100644 --- a/apps/desktop/src/lib/trpc/routers/settings/index.ts +++ b/apps/desktop/src/lib/trpc/routers/settings/index.ts @@ -2,10 +2,14 @@ import { type AgentCustomDefinition, type AgentPresetOverrideEnvelope, BRANCH_PREFIX_MODES, + BRANCH_SORT_ORDERS, EXECUTION_MODES, EXTERNAL_APPS, + FILE_DRAG_BEHAVIORS, FILE_OPEN_MODES, NON_EDITOR_APPS, + POST_COMMIT_COMMANDS, + SMART_COMMIT_CHANGES_MODES, settings, TERMINAL_LINK_BEHAVIORS, type TerminalPreset, @@ -43,12 +47,17 @@ import { DEFAULT_AUTO_APPLY_DEFAULT_PRESET, DEFAULT_CONFIRM_ON_QUIT, DEFAULT_EXPOSE_HOST_SERVICE_VIA_RELAY, + DEFAULT_FILE_DRAG_BEHAVIOR, DEFAULT_FILE_OPEN_MODE, DEFAULT_OPEN_LINKS_IN_APP, + DEFAULT_PREVENT_AGENT_SLEEP, + DEFAULT_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH, DEFAULT_SHOW_PRESETS_BAR, DEFAULT_SHOW_RESOURCE_MONITOR, DEFAULT_TERMINAL_LINK_BEHAVIOR, DEFAULT_USE_COMPACT_TERMINAL_ADD_BUTTON, + MAX_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH, + MIN_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH, } from "shared/constants"; import { normalizePresetProjectIds } from "shared/preset-project-targeting"; import { @@ -59,7 +68,12 @@ import { import { z } from "zod"; import { publicProcedure, router } from "../.."; import { loadToken } from "../auth/utils/auth-functions"; -import { getGitAuthorName, getGitHubUsername } from "../workspaces/utils/git"; +import { + getGitAuthorEmail, + getGitAuthorName, + getGitHubUsername, +} from "../workspaces/utils/git"; +import { getSimpleGitWithShellPath } from "../workspaces/utils/git-client"; import { createCustomAgentInputSchema, 
normalizeAgentPresetPatch, @@ -634,6 +648,26 @@ export const createSettingsRouter = () => { return { success: true }; }), + getPreventAgentSleep: publicProcedure.query(() => { + const row = getSettings(); + return row.preventAgentSleep ?? DEFAULT_PREVENT_AGENT_SLEEP; + }), + + setPreventAgentSleep: publicProcedure + .input(z.object({ enabled: z.boolean() })) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, preventAgentSleep: input.enabled }) + .onConflictDoUpdate({ + target: settings.id, + set: { preventAgentSleep: input.enabled }, + }) + .run(); + + return { success: true }; + }), + getExposeHostServiceViaRelay: publicProcedure.query(() => { const row = getSettings(); return ( @@ -738,6 +772,11 @@ export const createSettingsRouter = () => { return row.fileOpenMode ?? DEFAULT_FILE_OPEN_MODE; }), + getFileDragBehavior: publicProcedure.query(() => { + const row = getSettings(); + return row.fileDragBehavior ?? DEFAULT_FILE_DRAG_BEHAVIOR; + }), + setFileOpenMode: publicProcedure .input(z.object({ mode: z.enum(FILE_OPEN_MODES) })) .mutation(({ input }) => { @@ -753,6 +792,51 @@ export const createSettingsRouter = () => { return { success: true }; }), + setFileDragBehavior: publicProcedure + .input(z.object({ behavior: z.enum(FILE_DRAG_BEHAVIORS) })) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, fileDragBehavior: input.behavior }) + .onConflictDoUpdate({ + target: settings.id, + set: { fileDragBehavior: input.behavior }, + }) + .run(); + + return { success: true }; + }), + + getRightSidebarOpenViewWidth: publicProcedure.query(() => { + const row = getSettings(); + return ( + row.rightSidebarOpenViewWidth ?? 
DEFAULT_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH + ); + }), + + setRightSidebarOpenViewWidth: publicProcedure + .input( + z.object({ + width: z + .number() + .int() + .min(MIN_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH) + .max(MAX_RIGHT_SIDEBAR_OPEN_VIEW_WIDTH), + }), + ) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, rightSidebarOpenViewWidth: input.width }) + .onConflictDoUpdate({ + target: settings.id, + set: { rightSidebarOpenViewWidth: input.width }, + }) + .run(); + + return { success: true }; + }), + getAutoApplyDefaultPreset: publicProcedure.query(() => { const row = getSettings(); return row.autoApplyDefaultPreset ?? DEFAULT_AUTO_APPLY_DEFAULT_PRESET; @@ -817,13 +901,36 @@ export const createSettingsRouter = () => { getGitInfo: publicProcedure.query(async () => { const githubUsername = await getGitHubUsername(); const authorName = await getGitAuthorName(); + const authorEmail = await getGitAuthorEmail(); return { githubUsername, authorName, + authorEmail, authorPrefix: authorName?.toLowerCase().replace(/\s+/g, "-") ?? null, }; }), + setGlobalGitUserConfig: publicProcedure + .input( + z.object({ + name: z.string().trim().min(1, "Name is required"), + email: z + .string() + .trim() + .min(1, "Email is required") + .email("Must be a valid email"), + }), + ) + .mutation(async ({ input }) => { + // Write to the user's global git config so the identity is + // picked up by every future repository. `simple-git` resolves + // the same path as `git config --global`. + const git = await getSimpleGitWithShellPath(); + await git.addConfig("user.name", input.name, false, "global"); + await git.addConfig("user.email", input.email, false, "global"); + return { success: true }; + }), + getDeleteLocalBranch: publicProcedure.query(() => { const row = getSettings(); return row.deleteLocalBranch ?? 
false; @@ -844,6 +951,116 @@ export const createSettingsRouter = () => { return { success: true }; }), + getSmartCommit: publicProcedure.query(() => { + const row = getSettings(); + return { + enabled: row.enableSmartCommit ?? false, + changes: row.smartCommitChanges ?? "all", + }; + }), + + setSmartCommit: publicProcedure + .input( + z.object({ + enabled: z.boolean(), + changes: z.enum(SMART_COMMIT_CHANGES_MODES), + }), + ) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ + id: 1, + enableSmartCommit: input.enabled, + smartCommitChanges: input.changes, + }) + .onConflictDoUpdate({ + target: settings.id, + set: { + enableSmartCommit: input.enabled, + smartCommitChanges: input.changes, + }, + }) + .run(); + + return { success: true }; + }), + + getAutoStash: publicProcedure.query(() => { + const row = getSettings(); + return row.autoStash ?? false; + }), + + setAutoStash: publicProcedure + .input(z.object({ enabled: z.boolean() })) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, autoStash: input.enabled }) + .onConflictDoUpdate({ + target: settings.id, + set: { autoStash: input.enabled }, + }) + .run(); + + return { success: true }; + }), + + getBranchSortOrder: publicProcedure.query(() => { + const row = getSettings(); + return { + sortOrder: row.branchSortOrder ?? "committerdate", + pinDefault: row.pinDefaultBranch ?? 
true, + }; + }), + + setBranchSortOrder: publicProcedure + .input( + z.object({ + sortOrder: z.enum(BRANCH_SORT_ORDERS), + pinDefault: z.boolean(), + }), + ) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ + id: 1, + branchSortOrder: input.sortOrder, + pinDefaultBranch: input.pinDefault, + }) + .onConflictDoUpdate({ + target: settings.id, + set: { + branchSortOrder: input.sortOrder, + pinDefaultBranch: input.pinDefault, + }, + }) + .run(); + + return { success: true }; + }), + + getPostCommitCommand: publicProcedure.query(() => { + const row = getSettings(); + return row.postCommitCommand ?? "none"; + }), + + setPostCommitCommand: publicProcedure + .input(z.object({ command: z.enum(POST_COMMIT_COMMANDS) })) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, postCommitCommand: input.command }) + .onConflictDoUpdate({ + target: settings.id, + set: { postCommitCommand: input.command }, + }) + .run(); + + return { success: true }; + }), + getNotificationSoundsMuted: publicProcedure.query(() => { const row = getSettings(); return row.notificationSoundsMuted ?? false; @@ -884,6 +1101,124 @@ export const createSettingsRouter = () => { return { success: true }; }), + getAivisSettings: publicProcedure.query(() => { + const row = getSettings(); + return { + enabled: row.aivisEnabled ?? false, + apiKey: row.aivisApiKey ?? "", + modelUuid: row.aivisModelUuid ?? "", + userDictionaryUuid: row.aivisUserDictionaryUuid ?? "", + format: row.aivisFormat ?? "ワークスペース、{{workspace}}、です", + formatPermission: + row.aivisFormatPermission ?? "{{branch}}で対応が必要です", + volume: + typeof row.aivisVolume === "number" && + Number.isFinite(row.aivisVolume) + ? Math.max(0, Math.min(100, row.aivisVolume)) + : 100, + speakingRate: + typeof row.aivisSpeakingRate === "number" && + Number.isFinite(row.aivisSpeakingRate) + ? Math.max(0.5, Math.min(2.0, row.aivisSpeakingRate)) + : 1.0, + modelPresets: row.aivisModelPresets ?? 
[], + }; + }), + + setAivisSettings: publicProcedure + .input( + z.object({ + enabled: z.boolean().optional(), + apiKey: z.string().optional(), + modelUuid: z.string().optional(), + userDictionaryUuid: z.string().optional(), + format: z.string().optional(), + formatPermission: z.string().optional(), + volume: z.number().int().min(0).max(100).optional(), + speakingRate: z.number().min(0.5).max(2.0).optional(), + modelPresets: z + .array( + z.object({ + uuid: z.string().uuid(), + name: z.string(), + iconUrl: z.string().nullable(), + sampleUrl: z.string().nullable().optional(), + }), + ) + .optional(), + }), + ) + .mutation(({ input }) => { + const values: Record = { id: 1 }; + const set: Record = {}; + if (input.enabled !== undefined) { + values.aivisEnabled = input.enabled; + set.aivisEnabled = input.enabled; + } + if (input.apiKey !== undefined) { + values.aivisApiKey = input.apiKey; + set.aivisApiKey = input.apiKey; + } + if (input.modelUuid !== undefined) { + values.aivisModelUuid = input.modelUuid; + set.aivisModelUuid = input.modelUuid; + } + if (input.userDictionaryUuid !== undefined) { + values.aivisUserDictionaryUuid = input.userDictionaryUuid; + set.aivisUserDictionaryUuid = input.userDictionaryUuid; + } + if (input.format !== undefined) { + values.aivisFormat = input.format; + set.aivisFormat = input.format; + } + if (input.formatPermission !== undefined) { + values.aivisFormatPermission = input.formatPermission; + set.aivisFormatPermission = input.formatPermission; + } + if (input.volume !== undefined) { + values.aivisVolume = input.volume; + set.aivisVolume = input.volume; + } + if (input.speakingRate !== undefined) { + values.aivisSpeakingRate = input.speakingRate; + set.aivisSpeakingRate = input.speakingRate; + } + if (input.modelPresets !== undefined) { + values.aivisModelPresets = input.modelPresets; + set.aivisModelPresets = input.modelPresets; + } + localDb + .insert(settings) + .values(values) + .onConflictDoUpdate({ target: settings.id, set }) + 
.run(); + return { success: true }; + }), + + testAivisPlayback: publicProcedure + .input( + z.object({ + apiKey: z.string(), + modelUuid: z.string(), + text: z.string().min(1).max(3000), + userDictionaryUuid: z.string().uuid().optional(), + speakingRate: z.number().min(0.5).max(2.0).optional(), + }), + ) + .mutation(async ({ input }) => { + const { playAivisTts } = await import( + "main/lib/notifications/aivis-tts" + ); + await playAivisTts({ + apiKey: input.apiKey, + modelUuid: input.modelUuid, + text: input.text, + userDictionaryUuid: input.userDictionaryUuid, + speakingRate: input.speakingRate, + }); + return { success: true }; + }), + getFontSettings: publicProcedure.query(() => { const row = getSettings(); return { @@ -1004,6 +1339,117 @@ export const createSettingsRouter = () => { return { success: true }; }), + getIndentRainbow: publicProcedure.query(() => { + const row = getSettings(); + const colors = row.indentRainbowColors + ? (JSON.parse(row.indentRainbowColors) as string[]) + : null; + return { + enabled: row.indentRainbowEnabled ?? false, + colors, + }; + }), + + setIndentRainbow: publicProcedure + .input( + z.object({ + enabled: z.boolean().optional(), + colors: z.array(z.string()).nullable().optional(), + }), + ) + .mutation(({ input }) => { + const set: Record = {}; + if (input.enabled !== undefined) { + set.indentRainbowEnabled = input.enabled; + } + if (input.colors !== undefined) { + set.indentRainbowColors = input.colors + ? JSON.stringify(input.colors) + : null; + } + + if (Object.keys(set).length === 0) { + return { success: true }; + } + + localDb + .insert(settings) + .values({ id: 1, ...set }) + .onConflictDoUpdate({ + target: settings.id, + set, + }) + .run(); + + return { success: true }; + }), + + getTrailingSpaces: publicProcedure.query(() => { + const row = getSettings(); + return { + enabled: row.trailingSpacesEnabled ?? false, + color: row.trailingSpacesColor ?? 
null, + }; + }), + + setTrailingSpaces: publicProcedure + .input( + z.object({ + enabled: z.boolean().optional(), + color: z.string().nullable().optional(), + }), + ) + .mutation(({ input }) => { + const set: Record = {}; + if (input.enabled !== undefined) { + set.trailingSpacesEnabled = input.enabled; + } + if (input.color !== undefined) { + set.trailingSpacesColor = input.color; + } + + if (Object.keys(set).length === 0) { + return { success: true }; + } + + localDb + .insert(settings) + .values({ id: 1, ...set }) + .onConflictDoUpdate({ + target: settings.id, + set, + }) + .run(); + + return { success: true }; + }), + + getReferenceGraph: publicProcedure.query(() => { + const row = getSettings(); + return { + enabled: row.referenceGraphEnabled ?? true, + }; + }), + + setReferenceGraph: publicProcedure + .input( + z.object({ + enabled: z.boolean(), + }), + ) + .mutation(({ input }) => { + localDb + .insert(settings) + .values({ id: 1, referenceGraphEnabled: input.enabled }) + .onConflictDoUpdate({ + target: settings.id, + set: { referenceGraphEnabled: input.enabled }, + }) + .run(); + + return { success: true }; + }), + // TODO: remove telemetry procedures once telemetry_enabled column is dropped getTelemetryEnabled: publicProcedure.query(() => { return true; diff --git a/apps/desktop/src/lib/trpc/routers/tab-tearoff.ts b/apps/desktop/src/lib/trpc/routers/tab-tearoff.ts new file mode 100644 index 00000000000..bf0addef948 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/tab-tearoff.ts @@ -0,0 +1,44 @@ +import type { WindowManager } from "main/lib/window-manager"; +import { z } from "zod"; +import { publicProcedure, router } from ".."; +import { loadToken } from "./auth/utils/auth-functions"; + +export const createTabTearoffRouter = (wm: WindowManager) => { + return router({ + create: publicProcedure + .input( + z.object({ + tab: z.unknown(), + panes: z.record(z.string(), z.unknown()), + workspaceId: z.string(), + screenX: z.number(), + screenY: z.number(), + 
}), + ) + .mutation(async ({ input }) => { + const windowId = `tearoff-${Date.now()}`; + + // Store data FIRST so it's available when preload requests it + wm.setPendingTearoffData(windowId, { + tab: input.tab, + panes: input.panes, + workspaceId: input.workspaceId, + }); + + // Pre-load auth token so tearoff window can skip async auth hydration + const { token, expiresAt } = await loadToken(); + wm.setPendingAuthToken( + windowId, + token && expiresAt ? { token, expiresAt } : null, + ); + + wm.createTearoffWindow({ + windowId, + screenX: input.screenX, + screenY: input.screenY, + }); + + return { windowId }; + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/terminal/terminal.ts b/apps/desktop/src/lib/trpc/routers/terminal/terminal.ts index dc01978fb40..79e295f9d04 100644 --- a/apps/desktop/src/lib/trpc/routers/terminal/terminal.ts +++ b/apps/desktop/src/lib/trpc/routers/terminal/terminal.ts @@ -214,12 +214,18 @@ export const createTerminalRouter = () => { paneId: z.string(), data: z.string(), throwOnError: z.boolean().optional(), + interactive: z.boolean().optional(), }), ) .mutation(async ({ input }) => { const shouldThrow = input.throwOnError ?? false; try { - terminal.write(input); + await terminal.write({ + paneId: input.paneId, + data: input.data, + requireAck: shouldThrow, + interactive: input.interactive, + }); } catch (error) { const message = error instanceof Error ? error.message : "Write failed"; @@ -419,6 +425,20 @@ export const createTerminalRouter = () => { return restartDaemonShared(); }), + getSuggestions: publicProcedure + .input(z.object({ prefix: z.string(), offset: z.number().optional() })) + .query(async ({ input }) => { + const { getSuggestions } = await import("main/lib/shell-history"); + return getSuggestions(input.prefix, input.offset ?? 
0); + }), + + deleteHistorySuggestion: publicProcedure + .input(z.object({ command: z.string().min(1) })) + .mutation(async ({ input }) => { + const { deleteHistoryEntry } = await import("main/lib/shell-history"); + await deleteHistoryEntry(input.command); + }), + getSession: publicProcedure .input(z.string()) .query(async ({ input: paneId }) => { diff --git a/apps/desktop/src/lib/trpc/routers/ui-state/index.ts b/apps/desktop/src/lib/trpc/routers/ui-state/index.ts index 40b752eb072..c8cd30db854 100644 --- a/apps/desktop/src/lib/trpc/routers/ui-state/index.ts +++ b/apps/desktop/src/lib/trpc/routers/ui-state/index.ts @@ -10,11 +10,11 @@ import { publicProcedure, router } from "../.."; */ const fileViewerStateSchema = z.object({ filePath: z.string(), - viewMode: z.enum(["rendered", "raw", "diff"]), + viewMode: z.enum(["rendered", "raw", "diff", "conflict"]), isPinned: z.boolean(), diffLayout: z.enum(["inline", "side-by-side"]), diffCategory: z - .enum(["against-base", "committed", "staged", "unstaged"]) + .enum(["against-base", "committed", "staged", "unstaged", "conflicted"]) .optional(), commitHash: z.string().optional(), oldPath: z.string().optional(), @@ -36,7 +36,18 @@ const chatLaunchConfigSchema = z.object({ const paneSchema = z.object({ id: z.string(), tabId: z.string(), - type: z.enum(["terminal", "webview", "file-viewer", "chat", "devtools"]), + type: z.enum([ + "terminal", + "webview", + "file-viewer", + "chat", + "devtools", + "git-graph", + "database-explorer", + "action-logs", + "vscode-extension", + "reference-graph", + ]), name: z.string(), isNew: z.boolean().optional(), status: z.enum(["idle", "working", "permission", "review"]).optional(), @@ -79,6 +90,50 @@ const paneSchema = z.object({ targetPaneId: z.string(), }) .optional(), + databaseExplorer: z + .object({ + connectionId: z.string().nullable(), + }) + .optional(), + actionLogs: z + .object({ + jobs: z.array( + z.object({ + detailsUrl: z.string(), + name: z.string(), + status: z.enum([ + 
"success", + "failure", + "pending", + "skipped", + "cancelled", + ]), + }), + ), + initialJobIndex: z.number().optional(), + }) + .optional(), + vscodeExtension: z + .object({ + viewType: z.string(), + extensionId: z.string(), + source: z.enum(["view", "panel"]).optional(), + sessionId: z.string().optional(), + }) + .optional(), + gitGraph: z + .object({ + worktreePath: z.string(), + }) + .optional(), + referenceGraph: z + .object({ + absolutePath: z.string(), + languageId: z.string(), + line: z.number(), + column: z.number(), + }) + .optional(), workspaceRun: z .object({ workspaceId: z.string(), @@ -204,7 +259,98 @@ const terminalColorsSchema = z.object({ }); /** - * Zod schema for Theme + * Zod schema for editor chrome colors. + * Mirrors EditorColors in shared/themes/types.ts. + */ +const editorColorsSchema = z.object({ + background: z.string(), + foreground: z.string(), + border: z.string(), + cursor: z.string(), + gutterBackground: z.string(), + gutterForeground: z.string(), + activeLine: z.string(), + selection: z.string(), + search: z.string(), + searchActive: z.string(), + panel: z.string(), + panelBorder: z.string(), + panelInputBackground: z.string(), + panelInputForeground: z.string(), + panelInputBorder: z.string(), + panelButtonBackground: z.string(), + panelButtonForeground: z.string(), + panelButtonBorder: z.string(), + diffBuffer: z.string(), + diffHover: z.string(), + diffSeparator: z.string(), + addition: z.string(), + deletion: z.string(), + modified: z.string(), +}); + +/** + * Zod schema for editor syntax colors. + * Mirrors EditorSyntaxColors in shared/themes/types.ts. 
+ */ +const editorSyntaxColorsSchema = z.object({ + plainText: z.string(), + comment: z.string(), + docComment: z.string(), + keyword: z.string(), + controlKeyword: z.string(), + storageKeyword: z.string(), + string: z.string(), + escape: z.string(), + number: z.string(), + functionCall: z.string(), + variableName: z.string(), + variableProperty: z.string(), + typeName: z.string(), + className: z.string(), + constant: z.string(), + regexp: z.string(), + tagName: z.string(), + attributeName: z.string(), + invalid: z.string(), + annotation: z.string(), + operator: z.string(), + punctuation: z.string(), + markdownHeading: z.string(), + markdownEmphasis: z.string(), + markdownStrong: z.string(), + markdownStrikethrough: z.string(), + markdownLink: z.string(), + markdownUrl: z.string(), + markdownCode: z.string(), + markdownQuote: z.string(), + markdownList: z.string(), + markdownSeparator: z.string(), + meta: z.string(), +}); + +/** + * Zod schema for EditorThemeOverrides. + * Both `colors` and `syntax` accept partial shapes so imported themes that + * only override a subset of tokens still round-trip through persistence. + */ +const editorThemeOverridesSchema = z.object({ + colors: editorColorsSchema.partial().optional(), + syntax: editorSyntaxColorsSchema.partial().optional(), +}); + +/** + * Zod schema for Theme. + * + * `terminal` and `editor` are optional to match the Theme interface in + * shared/themes/types.ts. If they are missing, the app falls back to + * defaults derived from the theme type and base UI colors. + * + * Every field declared on the Theme interface MUST appear here — Zod's + * default `z.object()` silently strips unknown keys during + * `.input(...)` validation on the `theme.set` tRPC mutation, which + * means any missing field would be dropped on every persist cycle and + * lost after app restart. 
*/ const themeSchema = z.object({ id: z.string(), @@ -214,7 +360,8 @@ const themeSchema = z.object({ description: z.string().optional(), type: z.enum(["dark", "light"]), ui: uiColorsSchema, - terminal: terminalColorsSchema, + terminal: terminalColorsSchema.optional(), + editor: editorThemeOverridesSchema.optional(), isBuiltIn: z.boolean().optional(), isCustom: z.boolean().optional(), }); @@ -229,6 +376,11 @@ const themeStateSchema = z.object({ systemDarkThemeId: z.string().optional(), }); +export const __testing = { + themeSchema, + themeStateSchema, +}; + /** * UI State router - manages tabs and theme persistence via lowdb */ diff --git a/apps/desktop/src/lib/trpc/routers/ui-state/ui-state-schema.test.ts b/apps/desktop/src/lib/trpc/routers/ui-state/ui-state-schema.test.ts new file mode 100644 index 00000000000..839ed02c892 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/ui-state/ui-state-schema.test.ts @@ -0,0 +1,118 @@ +import { describe, expect, it } from "bun:test"; +import { darkTheme } from "shared/themes"; +import { __testing } from "./index"; + +const { themeSchema, themeStateSchema } = __testing; + +describe("themeSchema", () => { + it("preserves the editor field on a custom theme", () => { + const input = { + id: "my-custom", + name: "My Custom", + type: "dark" as const, + ui: darkTheme.ui, + terminal: darkTheme.terminal, + editor: { + colors: { + background: "#111111", + foreground: "#eeeeee", + }, + syntax: { + keyword: "#ff6688", + string: "#88ff66", + }, + }, + isCustom: true, + }; + + const parsed = themeSchema.parse(input); + + expect(parsed.editor).toBeDefined(); + expect(parsed.editor?.colors?.background).toBe("#111111"); + expect(parsed.editor?.colors?.foreground).toBe("#eeeeee"); + expect(parsed.editor?.syntax?.keyword).toBe("#ff6688"); + expect(parsed.editor?.syntax?.string).toBe("#88ff66"); + }); + + it("accepts a theme without terminal overrides", () => { + const input = { + id: "no-terminal", + name: "No Terminal", + type: "light" as 
const, + ui: darkTheme.ui, + isCustom: true, + }; + + expect(() => themeSchema.parse(input)).not.toThrow(); + }); + + it("accepts a theme without editor overrides", () => { + const input = { + id: "no-editor", + name: "No Editor", + type: "dark" as const, + ui: darkTheme.ui, + terminal: darkTheme.terminal, + isCustom: true, + }; + + const parsed = themeSchema.parse(input); + expect(parsed.editor).toBeUndefined(); + }); + + it("preserves partial editor.colors overrides", () => { + const input = { + id: "partial-colors", + name: "Partial Colors", + type: "dark" as const, + ui: darkTheme.ui, + editor: { + colors: { + addition: "#00ff00", + }, + }, + }; + + const parsed = themeSchema.parse(input); + expect(parsed.editor?.colors?.addition).toBe("#00ff00"); + }); + + it("preserves partial editor.syntax overrides", () => { + const input = { + id: "partial-syntax", + name: "Partial Syntax", + type: "dark" as const, + ui: darkTheme.ui, + editor: { + syntax: { + markdownHeading: "#abcdef", + }, + }, + }; + + const parsed = themeSchema.parse(input); + expect(parsed.editor?.syntax?.markdownHeading).toBe("#abcdef"); + }); + + it("round-trips a full theme state with editor overrides via themeStateSchema", () => { + const customTheme = { + id: "round-trip", + name: "Round Trip", + type: "dark" as const, + ui: darkTheme.ui, + editor: { + colors: { background: "#000000" }, + syntax: { keyword: "#ffffff" }, + }, + isCustom: true, + }; + + const parsed = themeStateSchema.parse({ + activeThemeId: "round-trip", + customThemes: [customTheme], + }); + + expect(parsed.customThemes[0]?.editor?.colors?.background).toBe("#000000"); + expect(parsed.customThemes[0]?.editor?.syntax?.keyword).toBe("#ffffff"); + }); +}); diff --git a/apps/desktop/src/lib/trpc/routers/vibrancy.ts b/apps/desktop/src/lib/trpc/routers/vibrancy.ts new file mode 100644 index 00000000000..38a0e5626c3 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/vibrancy.ts @@ -0,0 +1,92 @@ +import { observable } from 
"@trpc/server/observable"; +import { nativeTheme } from "electron"; +import { appState } from "main/lib/app-state"; +import { + applyVibrancy, + DEFAULT_VIBRANCY_STATE, + isNativeContinuousBlurSupported, + isVibrancySupported, + normalizeVibrancyState, + type VibrancyBlurLevel, + type VibrancyState, +} from "main/lib/vibrancy"; +import { VIBRANCY_EVENTS, vibrancyEmitter } from "main/lib/vibrancy/emitter"; +import type { WindowManager } from "main/lib/window-manager"; +import { z } from "zod"; +import { publicProcedure, router } from ".."; + +const blurLevelSchema: z.ZodType = z.enum([ + "subtle", + "standard", + "strong", + "ultra", +]); + +const vibrancyInputSchema = z.object({ + enabled: z.boolean().optional(), + opacity: z.number().int().min(0).max(100).optional(), + blurLevel: blurLevelSchema.optional(), + blurRadius: z.number().min(0).max(100).optional(), +}); + +function getCurrentState(): VibrancyState { + const stored = appState.data?.vibrancyState; + // Merge over defaults so older on-disk states (written before we added + // blurRadius) still produce a complete VibrancyState. Otherwise the + // missing field would round-trip as `undefined` and the slider would + // appear to reset on every restart. 
+ return { ...DEFAULT_VIBRANCY_STATE, ...stored }; +} + +async function writeState(next: VibrancyState): Promise { + if (!appState.data) return; + appState.data.vibrancyState = next; + await appState.write(); +} + +function broadcastVibrancy(wm: WindowManager, state: VibrancyState): void { + const isDark = nativeTheme.shouldUseDarkColors; + for (const window of wm.getAll().values()) { + applyVibrancy(window, state, isDark); + } +} + +export const createVibrancyRouter = (wm: WindowManager) => { + return router({ + getSupported: publicProcedure.query(() => { + return { + supported: isVibrancySupported(), + nativeBlurSupported: isNativeContinuousBlurSupported(), + }; + }), + + get: publicProcedure.query(() => { + return getCurrentState(); + }), + + set: publicProcedure + .input(vibrancyInputSchema) + .mutation(async ({ input }) => { + const current = getCurrentState(); + const next = normalizeVibrancyState(input, current); + await writeState(next); + broadcastVibrancy(wm, next); + vibrancyEmitter.emit(VIBRANCY_EVENTS.CHANGED, next); + return next; + }), + + onChanged: publicProcedure.subscription(() => { + return observable((emit) => { + const handler = (state: VibrancyState) => { + emit.next(state); + }; + vibrancyEmitter.on(VIBRANCY_EVENTS.CHANGED, handler); + return () => { + vibrancyEmitter.off(VIBRANCY_EVENTS.CHANGED, handler); + }; + }); + }), + }); +}; + +export type VibrancyRouter = ReturnType; diff --git a/apps/desktop/src/lib/trpc/routers/vscode-extensions/index.ts b/apps/desktop/src/lib/trpc/routers/vscode-extensions/index.ts new file mode 100644 index 00000000000..d328440a5a5 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/vscode-extensions/index.ts @@ -0,0 +1,565 @@ +import { spawnSync } from "node:child_process"; +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { pipeline } from "node:stream/promises"; +import { TRPCError } from "@trpc/server"; +import { observable } from "@trpc/server/observable"; 
+import { + getActivePanel, + getActiveView, +} from "main/lib/vscode-shim/api/webview"; +import { + clearWebviewHtml, + getWebviewUrl, + hasWebviewHtml, + setCustomThemeCss, + setWebviewHtml, +} from "main/lib/vscode-shim/api/webview-server"; +import { getExtensionHostManager } from "main/lib/vscode-shim/extension-host-manager"; +import type { WebviewBridgeEvent } from "main/lib/vscode-shim/webview-bridge"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; + +/** Known VS Code extensions that can be managed */ +const KNOWN_EXTENSIONS = [ + { + id: "anthropic.claude-code", + name: "Claude Code", + publisher: "Anthropic", + description: "AI coding assistant by Anthropic", + marketplaceUrl: + "https://marketplace.visualstudio.com/items?itemName=anthropic.claude-code", + viewType: "claudeVSCodeSidebar", + }, + { + id: "openai.chatgpt", + name: "ChatGPT / Codex", + publisher: "OpenAI", + description: "AI coding assistant by OpenAI", + marketplaceUrl: + "https://marketplace.visualstudio.com/items?itemName=openai.chatgpt", + viewType: "chatgpt.sidebarView", + }, + { + id: "moonshot-ai.kimi-code", + name: "Kimi Code", + publisher: "Moonshot AI", + description: "AI coding assistant by Moonshot AI", + marketplaceUrl: + "https://marketplace.visualstudio.com/items?itemName=moonshot-ai.kimi-code", + viewType: "kimi.webview", + }, +] as const; + +function getExtensionsDir(): string { + return path.join(os.homedir(), ".vscode", "extensions"); +} + +/** Persistent enabled/disabled state for extensions */ +function getEnabledConfigPath(): string { + const userDataPath = (() => { + try { + return require("electron").app.getPath("userData"); + } catch { + return path.join(os.homedir(), ".superset-desktop"); + } + })(); + return path.join(userDataPath, "vscode-extensions-enabled.json"); +} + +function readEnabledConfig(): Record { + try { + const p = getEnabledConfigPath(); + if (fs.existsSync(p)) { + return JSON.parse(fs.readFileSync(p, "utf-8")); + } + } 
catch {} + // All enabled by default + return {}; +} + +function writeEnabledConfig(config: Record): void { + try { + const p = getEnabledConfigPath(); + fs.mkdirSync(path.dirname(p), { recursive: true }); + fs.writeFileSync(p, JSON.stringify(config, null, 2)); + } catch {} +} + +function isExtensionEnabled(extensionId: string): boolean { + const config = readEnabledConfig(); + return config[extensionId] !== false; // enabled by default +} + +function isExtensionInstalled(extensionId: string): boolean { + const dir = getExtensionsDir(); + if (!fs.existsSync(dir)) return false; + const entries = fs.readdirSync(dir); + return entries.some((entry) => + entry.toLowerCase().startsWith(extensionId.toLowerCase()), + ); +} + +/** + * Download a VS Code extension from the marketplace and extract to extensions dir. + * Uses the VS Code Marketplace Gallery API to fetch the .vsix package. + */ +async function downloadAndInstallExtension(extensionId: string): Promise { + // Validate against known extensions whitelist + if (!KNOWN_EXTENSIONS.some((e) => e.id === extensionId)) { + throw new Error(`Unknown extension: ${extensionId}`); + } + + // Strict format validation + if (!/^[a-zA-Z0-9_-]+\.[a-zA-Z0-9_-]+$/.test(extensionId)) { + throw new Error(`Invalid extension ID format: ${extensionId}`); + } + + const [publisher, name] = extensionId.split("."); + if (!publisher || !name) { + throw new Error(`Invalid extension ID: ${extensionId}`); + } + + const extensionsDir = getExtensionsDir(); + fs.mkdirSync(extensionsDir, { recursive: true }); + + // Step 1: Query marketplace for latest version + download URL + const queryBody = JSON.stringify({ + filters: [ + { + criteria: [{ filterType: 7, value: `${publisher}.${name}` }], + }, + ], + flags: 0x200 | 0x1, // IncludeFiles | IncludeVersions + }); + + const queryResponse = await fetch( + "https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery", + { + method: "POST", + headers: { + "Content-Type": "application/json", + 
Accept: "application/json;api-version=6.0-preview.1", + }, + body: queryBody, + }, + ); + + if (!queryResponse.ok) { + throw new Error(`Marketplace query failed: ${queryResponse.status}`); + } + + const queryData = (await queryResponse.json()) as { + results: Array<{ + extensions: Array<{ + versions: Array<{ + version: string; + targetPlatform?: string; + files: Array<{ assetType: string; source: string }>; + }>; + }>; + }>; + }; + + const ext = queryData.results?.[0]?.extensions?.[0]; + if (!ext) { + throw new Error(`Extension not found: ${extensionId}`); + } + + // Find the best matching version (prefer platform-specific) + const platform = `${process.platform}-${process.arch}`; + const platformVersion = ext.versions.find( + (v) => v.targetPlatform === platform, + ); + const universalVersion = ext.versions.find( + (v) => !v.targetPlatform || v.targetPlatform === "universal", + ); + const version = platformVersion ?? universalVersion ?? ext.versions[0]; + if (!version) { + throw new Error(`No version found for ${extensionId}`); + } + + // Find VSIX download URL + const vsixAsset = version.files.find( + (f) => f.assetType === "Microsoft.VisualStudio.Services.VSIXPackage", + ); + if (!vsixAsset) { + throw new Error(`No VSIX package found for ${extensionId}`); + } + + // Step 2: Download .vsix + const vsixResponse = await fetch(vsixAsset.source); + if (!vsixResponse.ok || !vsixResponse.body) { + throw new Error(`VSIX download failed: ${vsixResponse.status}`); + } + + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "vscode-ext-")); + const vsixPath = path.join(tmpDir, `${extensionId}.vsix`); + + const fileStream = fs.createWriteStream(vsixPath); + // @ts-expect-error - Node fetch body is a ReadableStream + await pipeline(vsixResponse.body, fileStream); + + // Step 3: Extract .vsix (it's a zip file) + const targetSuffix = version.targetPlatform + ? 
`-${version.targetPlatform}` + : ""; + const extDir = path.join( + extensionsDir, + `${publisher}.${name}-${version.version}${targetSuffix}`, + ); + + try { + if (fs.existsSync(extDir)) { + fs.rmSync(extDir, { recursive: true }); + } + fs.mkdirSync(extDir, { recursive: true }); + + // Extract .vsix using spawnSync (no shell injection risk) + const extractDir = path.join(tmpDir, "extracted"); + const unzipResult = spawnSync( + "unzip", + ["-q", "-o", vsixPath, "-d", extractDir], + { + stdio: "pipe", + }, + ); + if (unzipResult.status !== 0) { + throw new Error(`unzip failed: ${unzipResult.stderr?.toString()}`); + } + + // Copy extension content using Node.js fs (no shell commands) + const extractedExtDir = path.join(extractDir, "extension"); + if (fs.existsSync(extractedExtDir)) { + fs.cpSync(extractedExtDir, extDir, { recursive: true }); + } + + // Copy vsixmanifest if present + const vsixManifest = path.join(extractDir, "extension.vsixmanifest"); + if (fs.existsSync(vsixManifest)) { + fs.copyFileSync(vsixManifest, path.join(extDir, ".vsixmanifest")); + } + } finally { + // Always cleanup temp directory + fs.rmSync(tmpDir, { recursive: true, force: true }); + } +} + +async function waitForWebviewHtml( + viewId: string, + timeoutMs = 5000, + pollIntervalMs = 100, +): Promise { + if (hasWebviewHtml(viewId)) { + return true; + } + + const deadline = Date.now() + timeoutMs; + while (Date.now() < deadline) { + await new Promise((resolve) => setTimeout(resolve, pollIntervalMs)); + if (hasWebviewHtml(viewId)) { + return true; + } + } + + return hasWebviewHtml(viewId); +} + +export const createVscodeExtensionsRouter = () => { + return router({ + /** Get all known extensions with their install/active status */ + getKnownExtensions: publicProcedure.query(() => { + const manager = getExtensionHostManager(); + const hasRunningExtensionHost = + manager.getRunningWorkspaceIds().length > 0; + return KNOWN_EXTENSIONS.map((ext) => { + const installed = isExtensionInstalled(ext.id); 
+ const enabled = isExtensionEnabled(ext.id); + return { + ...ext, + installed, + enabled, + active: installed && enabled && hasRunningExtensionHost, + }; + }); + }), + + /** Resolve a webview view for a given viewType, returns viewId + HTML */ + resolveWebview: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + workspacePath: z.string(), + viewType: z.string(), + extensionPath: z.string(), + }), + ) + .mutation(async ({ input }) => { + const manager = getExtensionHostManager(); + + // Start worker for this workspace if not already running + if (!manager.isRunning(input.workspaceId)) { + await manager.start(input.workspaceId, input.workspacePath); + } + + const result = await manager.resolveWebview( + input.workspaceId, + input.viewType, + input.extensionPath, + ); + + if (!result.viewId) { + return { viewId: null, url: null }; + } + + if (result.html) { + setWebviewHtml(result.viewId, result.html); + } + + const url = getWebviewUrl(result.viewId); + return { viewId: result.viewId, url }; + }), + + /** Attach to an existing webview session by viewId/panelId */ + attachWebview: publicProcedure + .input( + z.object({ + viewId: z.string(), + }), + ) + .mutation(async ({ input }) => { + const target = + getActiveView(input.viewId) ?? 
getActivePanel(input.viewId); + if (!target) { + return { viewId: null, url: null }; + } + + const hasHtml = await waitForWebviewHtml(input.viewId); + if (!hasHtml) { + return { viewId: null, url: null }; + } + + return { viewId: input.viewId, url: getWebviewUrl(input.viewId) }; + }), + + /** Dispose an existing panel-backed webview session */ + disposeWebview: publicProcedure + .input( + z.object({ + viewId: z.string(), + }), + ) + .mutation(({ input }) => { + const panel = getActivePanel(input.viewId); + if (!panel) { + clearWebviewHtml(input.viewId); + return { success: false }; + } + + panel.dispose(); + clearWebviewHtml(input.viewId); + return { success: true }; + }), + + /** Get current webview HTML */ + getWebviewHtml: publicProcedure + .input(z.object({ viewType: z.string() })) + .query(() => { + return null; + }), + + /** Send a message from renderer to extension webview */ + postMessageToExtension: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + viewId: z.string(), + message: z.unknown(), + }), + ) + .mutation(({ input }) => { + const manager = getExtensionHostManager(); + manager.postMessageToExtension( + input.workspaceId, + input.viewId, + input.message, + ); + return { success: true }; + }), + + /** Enable or disable an extension (persisted, requires restart for full effect) */ + setExtensionEnabled: publicProcedure + .input( + z.object({ + extensionId: z.string(), + enabled: z.boolean(), + }), + ) + .mutation(async ({ input }) => { + if (!KNOWN_EXTENSIONS.some((e) => e.id === input.extensionId)) { + throw new TRPCError({ + code: "BAD_REQUEST", + message: "Unknown extension", + }); + } + const config = readEnabledConfig(); + config[input.extensionId] = input.enabled; + writeEnabledConfig(config); + + return { success: true, needsRestart: true }; + }), + + /** Set custom theme CSS for webview rendering (null = use default dark theme) */ + setThemeCss: publicProcedure + .input(z.object({ css: z.string().nullable() })) + .mutation(({ 
input }) => { + setCustomThemeCss(input.css); + return { success: true }; + }), + + /** Set the workspace folder path for extensions */ + setWorkspacePath: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + workspacePath: z.string(), + }), + ) + .mutation(({ input }) => { + const manager = getExtensionHostManager(); + manager.setWorkspacePath(input.workspaceId, input.workspacePath); + return { success: true }; + }), + + /** Notify main process of active file change (for activeTextEditor) */ + setActiveEditor: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + filePath: z.string().nullable(), + languageId: z.string().optional(), + }), + ) + .mutation(({ input }) => { + const manager = getExtensionHostManager(); + manager.setActiveEditor( + input.workspaceId, + input.filePath, + input.languageId, + ); + return { success: true }; + }), + + /** Download and install an extension from the VS Code Marketplace */ + installExtension: publicProcedure + .input(z.object({ extensionId: z.string() })) + .mutation(async ({ input }) => { + await downloadAndInstallExtension(input.extensionId); + return { success: true }; + }), + + /** Restart a specific extension (stops and restarts the workspace worker) */ + restartExtension: publicProcedure + .input( + z.object({ + extensionId: z.string(), + workspaceId: z.string().optional(), + }), + ) + .mutation(async ({ input }) => { + if (!input.workspaceId) { + const manager = getExtensionHostManager(); + const runningWorkspaceIds = manager.getRunningWorkspaceIds(); + if (runningWorkspaceIds.length === 0) { + return { success: false }; + } + + await Promise.all( + runningWorkspaceIds.map(async (workspaceId) => { + const workspacePath = manager.getWorkspacePath(workspaceId) ?? 
""; + manager.stop(workspaceId); + await manager.start(workspaceId, workspacePath); + }), + ); + return { success: true }; + } + const manager = getExtensionHostManager(); + if (!manager.isRunning(input.workspaceId)) { + return { success: false }; + } + // Stop then explicitly restart (stop sets "stopped" status which prevents auto-restart) + const workspacePath = manager.getWorkspacePath(input.workspaceId) ?? ""; + manager.stop(input.workspaceId); + await manager.start(input.workspaceId, workspacePath); + return { success: true }; + }), + + /** Subscribe to file open requests from extensions (showTextDocument) */ + subscribeOpenFile: publicProcedure + .input(z.object({ workspaceId: z.string().optional() }).optional()) + .subscription(({ input }) => { + return observable<{ filePath: string; line?: number }>((emit) => { + const manager = getExtensionHostManager(); + const handler = ( + wsId: string, + data: { filePath: string; line?: number }, + ) => { + if (input?.workspaceId && wsId !== input.workspaceId) return; + emit.next(data); + }; + manager.on("open-file", handler); + return () => { + manager.off("open-file", handler); + }; + }); + }), + + /** Subscribe to diff open requests from extensions (vscode.diff calls) */ + subscribeDiff: publicProcedure + .input(z.object({ workspaceId: z.string().optional() }).optional()) + .subscription(({ input }) => { + return observable<{ + leftUri: string; + rightUri: string; + title?: string; + leftContent?: string; + }>((emit) => { + const manager = getExtensionHostManager(); + const handler = ( + wsId: string, + data: { + leftUri: string; + rightUri: string; + title?: string; + leftContent?: string; + }, + ) => { + if (input?.workspaceId && wsId !== input.workspaceId) return; + emit.next(data); + }; + manager.on("open-diff", handler); + return () => { + manager.off("open-diff", handler); + }; + }); + }), + + /** Subscribe to webview events (HTML changes, messages from extension) */ + subscribeWebview: publicProcedure + 
.input(z.object({ workspaceId: z.string().optional() }).optional()) + .subscription(({ input }) => { + return observable((emit) => { + const manager = getExtensionHostManager(); + const handler = (wsId: string, event: WebviewBridgeEvent) => { + if (input?.workspaceId && wsId !== input.workspaceId) return; + emit.next(event); + }; + manager.on("webview-event", handler); + return () => { + manager.off("webview-event", handler); + }; + }); + }), + }); +}; diff --git a/apps/desktop/src/lib/trpc/routers/window.ts b/apps/desktop/src/lib/trpc/routers/window.ts index 9923a0113ce..e701124a146 100644 --- a/apps/desktop/src/lib/trpc/routers/window.ts +++ b/apps/desktop/src/lib/trpc/routers/window.ts @@ -2,11 +2,15 @@ import fs from "node:fs/promises"; import { homedir } from "node:os"; import type { BrowserWindow } from "electron"; import { dialog } from "electron"; +import type { WindowManager } from "main/lib/window-manager"; import { getImageMimeType } from "shared/file-types"; import { z } from "zod"; import { publicProcedure, router } from ".."; -export const createWindowRouter = (getWindow: () => BrowserWindow | null) => { +export const createWindowRouter = ( + getWindow: () => BrowserWindow | null, + wm: WindowManager, +) => { return router({ minimize: publicProcedure.mutation(() => { const window = getWindow(); @@ -43,6 +47,16 @@ export const createWindowRouter = (getWindow: () => BrowserWindow | null) => { return process.platform; }), + shouldOwnSingletonEffects: publicProcedure + .input( + z.object({ + tearoffWindowId: z.string().nullable(), + }), + ) + .query(({ input }) => { + return wm.shouldWindowIdOwnSingletonEffects(input.tearoffWindowId); + }), + getHomeDir: publicProcedure.query(() => { return homedir(); }), diff --git a/apps/desktop/src/lib/trpc/routers/workspace-fs-service.ts b/apps/desktop/src/lib/trpc/routers/workspace-fs-service.ts index cd1e7e7cc02..a5fdc559ee7 100644 --- a/apps/desktop/src/lib/trpc/routers/workspace-fs-service.ts +++ 
b/apps/desktop/src/lib/trpc/routers/workspace-fs-service.ts @@ -1,4 +1,6 @@ +import { execFile, spawn } from "node:child_process"; import path from "node:path"; +import { promisify } from "node:util"; import { createFsHostService, type FsHostService, @@ -6,28 +8,100 @@ import { toRelativePath, type WorkspaceFsPathError, } from "@superset/workspace-fs/host"; +import { TRPCError } from "@trpc/server"; +import { rgPath as bundledRgPath } from "@vscode/ripgrep"; import { shell } from "electron"; import { getWorkspace } from "./workspaces/utils/db-helpers"; -import { execWithShellEnv } from "./workspaces/utils/shell-env"; import { getWorkspacePath } from "./workspaces/utils/worktree"; +const execFileAsync = promisify(execFile); + const filesystemWatcherManager = new FsWatcherManager(); +// electron-builder packs node_modules into app.asar, but native binaries can't +// execute from inside asar. We unpack @vscode/ripgrep via `asarUnpack` in +// electron-builder.ts, and at runtime we rewrite the path from the asar view +// to the asar.unpacked view so `execFile` can invoke it. +const rgExecutablePath = bundledRgPath.includes( + `${path.sep}app.asar${path.sep}`, +) + ? bundledRgPath.replace( + `${path.sep}app.asar${path.sep}`, + `${path.sep}app.asar.unpacked${path.sep}`, + ) + : bundledRgPath; + +async function* spawnBundledRipgrep( + args: string[], + options: { cwd: string; signal?: AbortSignal }, +): AsyncIterable { + // Streaming counterpart to `runRipgrep`: feeds searchContentStream so the + // Search tab can render matches as ripgrep emits them. We SIGTERM the + // child on abort instead of relying on `spawn`'s `signal` option so we + // can drain cleanly without propagating an AbortError into the generator. 
+ const child = spawn(rgExecutablePath, args, { + cwd: options.cwd, + windowsHide: true, + }); + + const onAbort = () => { + if (!child.killed) child.kill("SIGTERM"); + }; + const signal = options.signal; + if (signal) { + if (signal.aborted) { + onAbort(); + } else { + signal.addEventListener("abort", onAbort, { once: true }); + } + } + + try { + child.stdout.setEncoding("utf8"); + for await (const chunk of child.stdout as AsyncIterable) { + if (signal?.aborted) return; + yield chunk; + } + await new Promise((resolve, reject) => { + child.once("error", reject); + child.once("close", (code) => { + if (signal?.aborted || code === null || code === 0 || code === 1) { + resolve(); + } else { + const err = new Error(`ripgrep exited with code ${code}`) as Error & { + code?: number; + }; + err.code = code; + reject(err); + } + }); + }); + } finally { + signal?.removeEventListener("abort", onAbort); + if (!child.killed) child.kill("SIGTERM"); + } +} + const sharedHostServiceOptions = { trashItem: async (absolutePath: string) => { await shell.trashItem(absolutePath); }, runRipgrep: async ( args: string[], - options: { cwd: string; maxBuffer: number }, + options: { cwd: string; maxBuffer: number; signal?: AbortSignal }, ) => { - const result = await execWithShellEnv("rg", args, { + // Shipping our own ripgrep (via @vscode/ripgrep) means users don't + // have to `brew install ripgrep` to get .gitignore-aware search. + // Matches VSCode's approach. 
+ const result = await execFileAsync(rgExecutablePath, args, { cwd: options.cwd, maxBuffer: options.maxBuffer, windowsHide: true, + signal: options.signal, }); return { stdout: result.stdout }; }, + spawnRipgrep: spawnBundledRipgrep, }; export function resolveWorkspaceRootPath(workspaceId: string): string { @@ -81,7 +155,12 @@ export function toRegisteredWorktreeRelativePath( relativePath.startsWith(`..${path.sep}`) || path.isAbsolute(relativePath) ) { - throw new Error(`Path is outside worktree: ${absolutePath}`); + // This helper is only consumed by tRPC routers, so out-of-worktree access + // should be surfaced directly as BAD_REQUEST instead of bubbling as internal. + throw new TRPCError({ + code: "BAD_REQUEST", + message: `Path is outside worktree: ${absolutePath}`, + }); } return relativePath.replace(/\\/g, "/"); diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/github-extended.ts b/apps/desktop/src/lib/trpc/routers/workspaces/github-extended.ts new file mode 100644 index 00000000000..174254e1ed7 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/workspaces/github-extended.ts @@ -0,0 +1,1740 @@ +import { existsSync, readFileSync } from "node:fs"; +import path from "node:path"; +import type { GitHubStatus } from "@superset/local-db"; +import { workspaces, worktrees } from "@superset/local-db"; +import { TRPCError } from "@trpc/server"; +import { eq } from "drizzle-orm"; +import yaml from "js-yaml"; +import { localDb } from "main/lib/local-db"; +import { workspaceInitManager } from "main/lib/workspace-init-manager"; +import { getWorkspaceRuntimeRegistry } from "main/lib/workspace-runtime"; +import { z } from "zod"; +import { publicProcedure, router } from "../.."; +import { + clearWorkspaceDeletingStatus, + deleteWorkspace, + deleteWorktreeRecord, + getProject, + getWorkspace, + getWorktree, + hideProjectIfNoWorkspaces, + markWorkspaceAsDeleting, + updateActiveWorkspaceIfRemoved, +} from "./utils/db-helpers"; +import { + branchExistsOnRemote, + 
getCurrentBranch, + getDefaultBranch, +} from "./utils/git"; +import { + addPullRequestConversationComment, + clearGitHubCachesForWorktree, + extractNwoFromUrl, + fetchCheckJobSteps, + fetchGitHubPRStatus, + fetchJobStatuses, + fetchStructuredJobLogs, + getRepoContext, + replyToReviewThread, +} from "./utils/github"; +import { githubSyncService } from "./utils/github/github-sync-service"; +import { GHIdentityCandidatesResponseSchema } from "./utils/github/types"; +import { execWithShellEnv } from "./utils/shell-env"; + +/** + * Fork-local tRPC router that hosts the 19 GitHub Repository Tools procedures + * that upstream #3295 removed from `procedures/git-status.ts`. Keeping them in + * a dedicated namespace lets us adopt upstream's lightweight git-status/github + * helpers while preserving the fork's extended GitHub features. + */ + +// --------------------------------------------------------------------------- +// Schemas +// --------------------------------------------------------------------------- + +const ghRepositoryPullRequestSchema = z.object({ + number: z.number(), + title: z.string(), + url: z.string(), + state: z.enum(["OPEN", "CLOSED", "MERGED"]), + isDraft: z.boolean().optional().default(false), + headRefName: z.string().optional(), + updatedAt: z.string().nullable().optional(), + author: z + .object({ + login: z.string().optional(), + }) + .nullable() + .optional(), +}); + +const ghRepositoryWorkflowSchema = z.object({ + id: z.number(), + name: z.string(), + path: z.string().optional(), + state: z.string().optional(), +}); + +const ghRepositoryWorkflowsResponseSchema = z.object({ + workflows: z.array(ghRepositoryWorkflowSchema).optional(), +}); + +const ghRepositoryWorkflowRunSchema = z.object({ + id: z.number(), + name: z.string().nullable().optional(), + display_title: z.string().nullable().optional(), + html_url: z.string().optional(), + status: z.string().nullable().optional(), + conclusion: z.string().nullable().optional(), + event: 
z.string().nullable().optional(), + created_at: z.string().nullable().optional(), + updated_at: z.string().nullable().optional(), + run_started_at: z.string().nullable().optional(), + head_branch: z.string().nullable().optional(), + head_sha: z.string().nullable().optional(), + run_number: z.number().optional(), + workflow_id: z.number().optional(), +}); + +const ghRepositoryWorkflowRunsResponseSchema = z.object({ + workflow_runs: z.array(ghRepositoryWorkflowRunSchema).optional(), +}); + +const ghRepositoryLabelSchema = z.object({ + name: z.string(), + color: z.string().optional(), + description: z.string().nullable().optional(), +}); + +const ghRepositoryAssigneeSchema = z.object({ + login: z.string(), + avatar_url: z.string().optional(), +}); + +// --------------------------------------------------------------------------- +// Interfaces +// --------------------------------------------------------------------------- + +interface WorkflowDispatchInput { + name: string; + description: string; + required: boolean; + default: string; + type: "string" | "choice" | "boolean" | "number" | "environment"; + options: string[]; +} + +interface WorkflowDispatchInfo { + supportsDispatch: boolean; + inputs: WorkflowDispatchInput[]; +} + +// --------------------------------------------------------------------------- +// Helper functions +// --------------------------------------------------------------------------- + +async function loadGitHubOverviewSegment({ + label, + load, + fallback, + workspaceId, + repositoryNameWithOwner, +}: { + label: string; + load: () => Promise; + fallback: T; + workspaceId: string; + repositoryNameWithOwner: string; +}): Promise { + try { + return await load(); + } catch (error) { + // Overview data is best-effort. A flaky GitHub endpoint should degrade this + // segment to empty data rather than failing the entire repository overview. 
+ console.warn("[git-status/github-overview] Falling back to empty segment", { + label, + workspaceId, + repositoryNameWithOwner, + error, + }); + return fallback; + } +} + +function sanitizeIssueAssetBasename(value: string): string { + return value + .toLowerCase() + .replace(/[^a-z0-9._-]+/g, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, "") + .slice(0, 80); +} + +function getIssueAssetExtension({ + filename, + mimeType, +}: { + filename?: string; + mimeType?: string; +}): string { + const lower = filename?.toLowerCase() ?? ""; + if (lower.endsWith(".png")) return "png"; + if (lower.endsWith(".jpg") || lower.endsWith(".jpeg")) return "jpg"; + if (lower.endsWith(".gif")) return "gif"; + if (lower.endsWith(".webp")) return "webp"; + + if (mimeType === "image/jpeg") return "jpg"; + if (mimeType === "image/gif") return "gif"; + if (mimeType === "image/webp") return "webp"; + return "png"; +} + +async function ensureGitHubBranchExists({ + repoPath, + repositoryNameWithOwner, + branchName, + baseBranch, +}: { + repoPath: string; + repositoryNameWithOwner: string; + branchName: string; + baseBranch: string; +}) { + try { + await execWithShellEnv( + "gh", + ["api", `repos/${repositoryNameWithOwner}/git/ref/heads/${branchName}`], + { cwd: repoPath }, + ); + return; + } catch (error) { + const errorText = + error instanceof Error + ? [ + error.message, + "stderr" in error && typeof error.stderr === "string" + ? error.stderr + : "", + "stdout" in error && typeof error.stdout === "string" + ? 
error.stdout + : "", + ] + .join("\n") + .toLowerCase() + : String(error).toLowerCase(); + const isMissingRefError = + errorText.includes("404") || + errorText.includes("not found") || + errorText.includes("no ref found"); + + if (!isMissingRefError) { + console.warn("[git-status] GitHub branch probe failed", { + repoPath, + repositoryNameWithOwner, + branchName, + baseBranch, + error, + }); + throw error; + } + + console.warn("[git-status] GitHub branch not found, creating branch", { + repoPath, + repositoryNameWithOwner, + branchName, + baseBranch, + error, + }); + } + + const { stdout } = await execWithShellEnv( + "gh", + ["api", `repos/${repositoryNameWithOwner}/git/ref/heads/${baseBranch}`], + { cwd: repoPath }, + ); + const raw = JSON.parse(stdout) as { + object?: { sha?: string }; + }; + const sha = raw.object?.sha; + if (!sha) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: "Could not determine the base branch SHA for issue assets.", + }); + } + + await execWithShellEnv( + "gh", + [ + "api", + "--method", + "POST", + `repos/${repositoryNameWithOwner}/git/refs`, + "-f", + `ref=refs/heads/${branchName}`, + "-f", + `sha=${sha}`, + ], + { cwd: repoPath }, + ); +} + +function parseRunIdFromActionsUrl(detailsUrl?: string): string | null { + if (!detailsUrl) { + return null; + } + + try { + const url = new URL(detailsUrl); + const match = url.pathname.match(/\/actions\/runs\/(\d+)(?:\/|$)/); + return match?.[1] ?? 
null; + } catch { + return null; + } +} + +function isGitHubActionsUrl(url?: string): boolean { + return parseRunIdFromActionsUrl(url) !== null; +} + +function parseWorkflowDispatchInfo({ + repoPath, + workflowPath, +}: { + repoPath: string; + workflowPath?: string; +}): WorkflowDispatchInfo { + const noDispatch: WorkflowDispatchInfo = { + supportsDispatch: false, + inputs: [], + }; + + if (!workflowPath) { + return noDispatch; + } + + const absolutePath = path.join(repoPath, workflowPath); + if (!existsSync(absolutePath)) { + return noDispatch; + } + + let content: string; + try { + content = readFileSync(absolutePath, "utf8"); + } catch { + return noDispatch; + } + + const hasDispatch = + /^\s*workflow_dispatch\s*:/m.test(content) || + /^\s*on\s*:\s*workflow_dispatch\s*$/m.test(content) || + /^\s*on\s*:\s*\[[^\]]*\bworkflow_dispatch\b[^\]]*\]/m.test(content); + + if (!hasDispatch) { + return noDispatch; + } + + try { + const parsed = yaml.load(content) as Record | null; + if (!parsed || typeof parsed !== "object") { + return { supportsDispatch: true, inputs: [] }; + } + + const onBlock = parsed.on ?? parsed.true; + if (!onBlock || typeof onBlock !== "object") { + return { supportsDispatch: true, inputs: [] }; + } + + const dispatchBlock = (onBlock as Record) + .workflow_dispatch; + if (!dispatchBlock || typeof dispatchBlock !== "object") { + return { supportsDispatch: true, inputs: [] }; + } + + const rawInputs = (dispatchBlock as Record).inputs; + if (!rawInputs || typeof rawInputs !== "object") { + return { supportsDispatch: true, inputs: [] }; + } + + const inputs: WorkflowDispatchInput[] = Object.entries( + rawInputs as Record, + ).map(([name, value]) => { + const input = (value ?? {}) as Record; + const inputType = String(input.type ?? "string"); + const options: string[] = Array.isArray(input.options) + ? input.options.map(String) + : []; + + return { + name, + description: String(input.description ?? ""), + required: Boolean(input.required ?? 
false), + default: String(input.default ?? ""), + type: ( + ["string", "choice", "boolean", "number", "environment"] as const + ).includes(inputType as never) + ? (inputType as WorkflowDispatchInput["type"]) + : "string", + options, + }; + }); + + return { supportsDispatch: true, inputs }; + } catch { + return { supportsDispatch: true, inputs: [] }; + } +} + +function resolveRepoPathForWorkspace(workspaceId: string): { + workspace: NonNullable>; + worktree: NonNullable> | null; + repoPath: string; +} { + const workspace = getWorkspace(workspaceId); + if (!workspace) { + throw new TRPCError({ + code: "NOT_FOUND", + message: `Workspace ${workspaceId} not found`, + }); + } + + const worktree = workspace.worktreeId + ? (getWorktree(workspace.worktreeId) ?? null) + : null; + let repoPath: string | null = worktree?.path ?? null; + if (!repoPath && workspace.type === "branch") { + const project = getProject(workspace.projectId); + repoPath = project?.mainRepoPath ?? null; + } + + if (!repoPath) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "GitHub is not available for this workspace.", + }); + } + + return { workspace, worktree, repoPath }; +} + +async function getFreshPullRequestForWorkspace(workspaceId: string): Promise<{ + repoPath: string; + worktree: NonNullable> | null; + pullRequest: NonNullable; +}> { + const { repoPath, worktree } = resolveRepoPathForWorkspace(workspaceId); + clearGitHubCachesForWorktree(repoPath); + const githubStatus = await fetchGitHubPRStatus(repoPath); + const pullRequest = githubStatus?.pr ?? 
null; + + if (!pullRequest) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "No pull request found for this workspace.", + }); + } + + return { repoPath, worktree, pullRequest }; +} + +async function resolveRepositoryTargetForWorkspace( + workspaceId: string, +): Promise<{ + repoPath: string; + worktree: NonNullable> | null; + repositoryUrl: string; + repositoryNameWithOwner: string; + upstreamUrl: string; + upstreamNameWithOwner: string; + isFork: boolean; + branchExistsOnRemote: boolean; + currentBranch: string; + defaultBranch: string; +}> { + const { repoPath, worktree } = resolveRepoPathForWorkspace(workspaceId); + const [githubStatus, repoContext, currentBranch, defaultBranch] = + await Promise.all([ + fetchGitHubPRStatus(repoPath), + getRepoContext(repoPath), + getCurrentBranch(repoPath), + getDefaultBranch(repoPath), + ]); + + const repoUrl = githubStatus?.repoUrl ?? repoContext?.repoUrl; + const upstreamUrl = + githubStatus?.upstreamUrl ?? repoContext?.upstreamUrl ?? repoUrl; + const isFork = githubStatus?.isFork ?? repoContext?.isFork ?? false; + const repositoryUrl = repoUrl; + const repositoryNameWithOwner = repositoryUrl + ? extractNwoFromUrl(repositoryUrl) + : null; + const upstreamNameWithOwner = upstreamUrl + ? extractNwoFromUrl(upstreamUrl) + : null; + + if ( + !repoUrl || + !upstreamUrl || + !repositoryUrl || + !repositoryNameWithOwner || + !upstreamNameWithOwner + ) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Could not determine the GitHub repository for this workspace.", + }); + } + + if (!currentBranch) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Could not determine the current branch for this workspace.", + }); + } + + return { + repoPath, + worktree, + repositoryUrl, + repositoryNameWithOwner, + upstreamUrl, + upstreamNameWithOwner, + isFork, + branchExistsOnRemote: githubStatus?.branchExistsOnRemote ?? 
false, + currentBranch, + defaultBranch, + }; +} + +async function getGitHubRepositoryOverviewForWorkspace(workspaceId: string) { + const { + repoPath, + repositoryNameWithOwner, + repositoryUrl, + upstreamUrl, + upstreamNameWithOwner, + isFork, + branchExistsOnRemote, + currentBranch, + defaultBranch, + } = await resolveRepositoryTargetForWorkspace(workspaceId); + + const [pullRequests, workflows, labels, assignees] = await Promise.all([ + loadGitHubOverviewSegment({ + label: "pullRequests", + workspaceId, + repositoryNameWithOwner, + fallback: [] as Array>, + load: async () => { + const result = await execWithShellEnv( + "gh", + [ + "pr", + "list", + "--repo", + repositoryNameWithOwner, + "--state", + "open", + "--limit", + "8", + "--json", + "number,title,url,state,isDraft,headRefName,updatedAt,author", + ], + { cwd: repoPath }, + ); + return z + .array(ghRepositoryPullRequestSchema) + .parse(JSON.parse(result.stdout) as unknown); + }, + }), + loadGitHubOverviewSegment({ + label: "workflows", + workspaceId, + repositoryNameWithOwner, + fallback: [] as Array>, + load: async () => { + const result = await execWithShellEnv( + "gh", + [ + "api", + `repos/${repositoryNameWithOwner}/actions/workflows?per_page=100`, + ], + { cwd: repoPath }, + ); + return ( + ghRepositoryWorkflowsResponseSchema.parse( + JSON.parse(result.stdout) as unknown, + ).workflows ?? 
[] + ); + }, + }), + loadGitHubOverviewSegment({ + label: "labels", + workspaceId, + repositoryNameWithOwner, + fallback: [] as Array>, + load: async () => { + const result = await execWithShellEnv( + "gh", + ["api", `repos/${repositoryNameWithOwner}/labels?per_page=100`], + { cwd: repoPath }, + ); + return z + .array(ghRepositoryLabelSchema) + .parse(JSON.parse(result.stdout) as unknown); + }, + }), + loadGitHubOverviewSegment({ + label: "assignees", + workspaceId, + repositoryNameWithOwner, + fallback: [] as Array>, + load: async () => { + const result = await execWithShellEnv( + "gh", + ["api", `repos/${repositoryNameWithOwner}/assignees?per_page=100`], + { cwd: repoPath }, + ); + return z + .array(ghRepositoryAssigneeSchema) + .parse(JSON.parse(result.stdout) as unknown); + }, + }), + ]); + + return { + repositoryNameWithOwner, + repositoryUrl, + upstreamUrl, + upstreamNameWithOwner, + isFork, + branchExistsOnRemote, + currentBranch, + defaultBranch, + issueAssignees: assignees.map((assignee) => ({ + login: assignee.login, + avatarUrl: assignee.avatar_url ?? null, + })), + issueLabels: labels.map((label) => ({ + name: label.name, + color: label.color ?? "", + description: label.description ?? "", + })), + pullsUrl: `${repositoryUrl}/pulls`, + issuesUrl: `${repositoryUrl}/issues`, + actionsUrl: `${repositoryUrl}/actions`, + newIssueUrl: `${repositoryUrl}/issues/new`, + pullRequests: pullRequests.map((pullRequest) => ({ + number: pullRequest.number, + title: pullRequest.title, + url: pullRequest.url, + state: pullRequest.isDraft ? "draft" : pullRequest.state.toLowerCase(), + headRefName: pullRequest.headRefName ?? "", + updatedAt: pullRequest.updatedAt ?? null, + authorLogin: pullRequest.author?.login ?? 
null, + })), + workflows: workflows + .filter((workflow) => workflow.state !== "disabled_manually") + .map((workflow) => { + const dispatchInfo = parseWorkflowDispatchInfo({ + repoPath, + workflowPath: workflow.path, + }); + return { + id: workflow.id, + name: workflow.name, + path: workflow.path ?? "", + state: workflow.state ?? "unknown", + supportsDispatch: dispatchInfo.supportsDispatch, + inputs: dispatchInfo.inputs, + }; + }) + .filter((workflow) => workflow.supportsDispatch), + }; +} + +async function createGitHubIssueForWorkspace({ + workspaceId, + title, + body, + assignees, + labels, +}: { + workspaceId: string; + title: string; + body?: string; + assignees?: string[]; + labels?: string[]; +}) { + const { repoPath, repositoryNameWithOwner } = + await resolveRepositoryTargetForWorkspace(workspaceId); + const args = [ + "issue", + "create", + "--repo", + repositoryNameWithOwner, + "--title", + title.trim(), + "--body", + body?.trim() || "", + ]; + const normalizedAssignees = normalizeIdentityList(assignees ?? []); + const normalizedLabels = normalizeIdentityList(labels ?? 
[]); + if (normalizedAssignees.length > 0) { + args.push("--assignee", normalizedAssignees.join(",")); + } + if (normalizedLabels.length > 0) { + args.push("--label", normalizedLabels.join(",")); + } + const { stdout } = await execWithShellEnv("gh", args, { cwd: repoPath }); + + return { + url: stdout.trim(), + }; +} + +async function uploadIssueAssetForWorkspace({ + workspaceId, + filename, + contentBase64, + mimeType, +}: { + workspaceId: string; + filename: string; + contentBase64: string; + mimeType?: string; +}) { + const { repoPath, repositoryNameWithOwner, defaultBranch } = + await resolveRepositoryTargetForWorkspace(workspaceId); + const assetBranch = "superset-issue-assets"; + await ensureGitHubBranchExists({ + repoPath, + repositoryNameWithOwner, + branchName: assetBranch, + baseBranch: defaultBranch, + }); + + const now = new Date(); + const extension = getIssueAssetExtension({ filename, mimeType }); + const basename = + sanitizeIssueAssetBasename(filename.replace(/\.[^.]+$/, "")) || + "pasted-image"; + const timestamp = now.toISOString().replace(/[:.]/g, "-"); + const assetPath = [ + ".superset", + "issue-assets", + String(now.getUTCFullYear()), + String(now.getUTCMonth() + 1).padStart(2, "0"), + `${timestamp}-${basename}.${extension}`, + ].join("/"); + + await execWithShellEnv( + "gh", + [ + "api", + "--method", + "PUT", + `repos/${repositoryNameWithOwner}/contents/${assetPath}`, + "-f", + `message=Add issue asset ${assetPath}`, + "-f", + `content=${contentBase64}`, + "-f", + `branch=${assetBranch}`, + ], + { cwd: repoPath }, + ); + + const assetUrl = `https://github.com/${repositoryNameWithOwner}/raw/${assetBranch}/${assetPath}`; + + return { + name: `${basename}.${extension}`, + url: assetUrl, + markdown: `![${basename}](${assetUrl})`, + }; +} + +async function dispatchGitHubWorkflowForWorkspace({ + workspaceId, + workflowId, + ref, + inputs, +}: { + workspaceId: string; + workflowId: number; + ref?: string; + inputs?: Record; +}) { + const { 
repoPath, repositoryNameWithOwner, currentBranch, defaultBranch } = + await resolveRepositoryTargetForWorkspace(workspaceId); + const requestedRef = ref?.trim() || currentBranch || defaultBranch; + let targetRef = requestedRef; + if (requestedRef === currentBranch) { + const branchCheck = await branchExistsOnRemote( + repoPath, + currentBranch, + "origin", + ); + if (branchCheck.status !== "exists") { + targetRef = defaultBranch; + } + } + + const args = [ + "api", + "--method", + "POST", + `repos/${repositoryNameWithOwner}/actions/workflows/${workflowId}/dispatches`, + "-f", + `ref=${targetRef}`, + ]; + + if (inputs) { + for (const [key, value] of Object.entries(inputs)) { + args.push("-f", `inputs[${key}]=${value}`); + } + } + + await execWithShellEnv("gh", args, { cwd: repoPath }); + + return { + success: true as const, + ref: targetRef, + dispatchedAt: new Date().toISOString(), + }; +} + +async function getGitHubWorkflowRunsForWorkspace({ + workspaceId, + workflowId, +}: { + workspaceId: string; + workflowId: number; +}) { + const { repoPath, repositoryNameWithOwner } = + await resolveRepositoryTargetForWorkspace(workspaceId); + const { stdout } = await execWithShellEnv( + "gh", + [ + "api", + `repos/${repositoryNameWithOwner}/actions/workflows/${workflowId}/runs?per_page=10&event=workflow_dispatch`, + ], + { cwd: repoPath }, + ); + + const rawRuns = JSON.parse(stdout) as unknown; + const runs = + ghRepositoryWorkflowRunsResponseSchema.parse(rawRuns).workflow_runs ?? []; + + return runs.map((run) => ({ + id: run.id, + name: run.name ?? "", + displayTitle: run.display_title ?? "", + url: run.html_url ?? "", + status: run.status ?? "unknown", + conclusion: run.conclusion ?? null, + event: run.event ?? null, + createdAt: run.created_at ?? null, + updatedAt: run.updated_at ?? null, + runStartedAt: run.run_started_at ?? null, + headBranch: run.head_branch ?? null, + headSha: run.head_sha ?? null, + runNumber: run.run_number ?? null, + workflowId: run.workflow_id ?? 
workflowId, + })); +} + +async function getWorkflowRunJobsForWorkspace({ + workspaceId, + runId, +}: { + workspaceId: string; + runId: number; +}) { + const { repoPath, repositoryNameWithOwner } = + await resolveRepositoryTargetForWorkspace(workspaceId); + const { stdout } = await execWithShellEnv( + "gh", + [ + "api", + `repos/${repositoryNameWithOwner}/actions/runs/${runId}/jobs?per_page=100`, + ], + { cwd: repoPath }, + ); + + const raw: unknown = JSON.parse(stdout); + const parsed = z + .object({ + jobs: z + .array( + z.object({ + id: z.number(), + name: z.string(), + status: z.string(), + conclusion: z.string().nullable(), + html_url: z.string().nullable().optional(), + }), + ) + .optional(), + }) + .parse(raw); + + return (parsed.jobs ?? []).map((job) => ({ + detailsUrl: job.html_url ?? "", + name: job.name, + status: mapJobStatus(job.status, job.conclusion), + })); +} + +function mapJobStatus( + status: string, + conclusion: string | null, +): "success" | "failure" | "pending" | "skipped" | "cancelled" { + if (status !== "completed") { + return "pending"; + } + switch (conclusion) { + case "success": + return "success"; + case "failure": + case "timed_out": + return "failure"; + case "cancelled": + return "cancelled"; + case "skipped": + return "skipped"; + default: + return "pending"; + } +} + +async function rerunPullRequestChecksForWorkspace({ + workspaceId, + mode, +}: { + workspaceId: string; + mode: "all" | "failed"; +}) { + const { repoPath, worktree, pullRequest } = + await getFreshPullRequestForWorkspace(workspaceId); + const checksToRerun = pullRequest.checks.filter((check) => { + if (!isGitHubActionsUrl(check.url)) { + return false; + } + + if (mode === "failed") { + return check.status === "failure"; + } + + return true; + }); + + if (checksToRerun.length === 0) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + mode === "failed" + ? "No failed GitHub Actions jobs found for this pull request." 
+ : "No GitHub Actions jobs found for this pull request.", + }); + } + + const runTargets = new Map(); + for (const check of checksToRerun) { + const runId = parseRunIdFromActionsUrl(check.url); + const repositoryNameWithOwner = check.url + ? extractNwoFromUrl(check.url) + : null; + if (!runId || !repositoryNameWithOwner) { + continue; + } + + runTargets.set( + `${repositoryNameWithOwner}:${runId}`, + `${repositoryNameWithOwner}:${runId}`, + ); + } + + if (runTargets.size === 0) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "No rerunnable GitHub Actions runs were found.", + }); + } + + for (const target of runTargets.values()) { + const [repositoryNameWithOwner, runId] = target.split(":"); + if (!repositoryNameWithOwner || !runId) { + continue; + } + + await execWithShellEnv( + "gh", + [ + "api", + "--method", + "POST", + `repos/${repositoryNameWithOwner}/actions/runs/${runId}/${mode === "failed" ? "rerun-failed-jobs" : "rerun"}`, + ], + { cwd: repoPath }, + ); + } + + clearGitHubCachesForWorktree(repoPath); + if (worktree) { + localDb + .update(worktrees) + .set({ githubStatus: null }) + .where(eq(worktrees.id, worktree.id)) + .run(); + } + + return { + success: true as const, + rerunCount: runTargets.size, + }; +} + +function resolvePullRequestTarget({ + workspaceId, + pullRequestNumber, + pullRequestUrl, +}: { + workspaceId: string; + pullRequestNumber?: number; + pullRequestUrl?: string; +}): { + repoPath: string; + worktree: NonNullable> | null; + repoNameWithOwner: string; + pullRequestNumber: number; +} { + const { repoPath, worktree } = resolveRepoPathForWorkspace(workspaceId); + const repoNameWithOwner = pullRequestUrl + ? 
extractNwoFromUrl(pullRequestUrl) + : null; + + if (!repoNameWithOwner || !pullRequestNumber) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Could not determine the pull request target.", + }); + } + + return { + repoPath, + worktree, + repoNameWithOwner, + pullRequestNumber, + }; +} + +function resolvePullRequestRepoTarget({ + workspaceId, + pullRequestUrl, +}: { + workspaceId: string; + pullRequestUrl?: string; +}): { + repoPath: string; + worktree: NonNullable> | null; + repoNameWithOwner: string; +} { + const { repoPath, worktree } = resolveRepoPathForWorkspace(workspaceId); + const repoNameWithOwner = pullRequestUrl + ? extractNwoFromUrl(pullRequestUrl) + : null; + + if (!repoNameWithOwner) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Could not determine the pull request repository.", + }); + } + + return { + repoPath, + worktree, + repoNameWithOwner, + }; +} + +function normalizeIdentityList(values: string[]): string[] { + return Array.from( + new Set(values.map((value) => value.trim()).filter(Boolean)), + ); +} + +async function updatePullRequestMembers({ + workspaceId, + kind, + add, + remove, + pullRequestNumber, + pullRequestUrl, +}: { + workspaceId: string; + kind: "reviewer" | "assignee"; + add: string[]; + remove: string[]; + pullRequestNumber?: number; + pullRequestUrl?: string; +}): Promise<{ success: true }> { + const normalizedAdd = normalizeIdentityList(add); + const normalizedRemove = normalizeIdentityList(remove); + + if (normalizedAdd.length === 0 && normalizedRemove.length === 0) { + return { success: true }; + } + + const { + repoPath, + worktree, + repoNameWithOwner, + pullRequestNumber: resolvedPr, + } = resolvePullRequestTarget({ + workspaceId, + pullRequestNumber, + pullRequestUrl, + }); + + const args = ["pr", "edit", String(resolvedPr), "--repo", repoNameWithOwner]; + + if (normalizedAdd.length > 0) { + args.push( + kind === "reviewer" ? 
"--add-reviewer" : "--add-assignee", + normalizedAdd.join(","), + ); + } + + if (normalizedRemove.length > 0) { + args.push( + kind === "reviewer" ? "--remove-reviewer" : "--remove-assignee", + normalizedRemove.join(","), + ); + } + + await execWithShellEnv("gh", args, { cwd: repoPath }); + clearGitHubCachesForWorktree(repoPath); + + if (worktree) { + localDb + .update(worktrees) + .set({ githubStatus: null }) + .where(eq(worktrees.id, worktree.id)) + .run(); + } + + return { success: true }; +} + +async function getPullRequestIdentityCandidatesHelper({ + workspaceId, + kind, + pullRequestUrl, +}: { + workspaceId: string; + kind: "reviewer" | "assignee"; + pullRequestUrl?: string; +}): Promise> { + const { repoPath, repoNameWithOwner } = resolvePullRequestRepoTarget({ + workspaceId, + pullRequestUrl, + }); + + const [owner, name] = repoNameWithOwner.split("/"); + if (!owner || !name) { + return []; + } + + const fieldName = + kind === "assignee" ? "assignableUsers" : "mentionableUsers"; + const query = `query PullRequestIdentityCandidates($owner: String!, $name: String!, $after: String) { + repository(owner: $owner, name: $name) { + users: ${fieldName}(first: 100, after: $after) { + nodes { + login + avatarUrl + } + pageInfo { + hasNextPage + endCursor + } + } + } +}`; + + const usersByLogin = new Map(); + let afterCursor: string | null = null; + + while (true) { + const args = [ + "api", + "graphql", + "-f", + `query=${query}`, + "-F", + `owner=${owner}`, + "-F", + `name=${name}`, + ]; + if (afterCursor) { + args.push("-F", `after=${afterCursor}`); + } + + const { stdout } = await execWithShellEnv("gh", args, { cwd: repoPath }); + const raw = JSON.parse(stdout) as unknown; + const parsed = GHIdentityCandidatesResponseSchema.safeParse(raw); + if (!parsed.success) { + console.warn( + "[GitHub] Failed to parse pull request identity candidates:", + parsed.error.message, + ); + break; + } + + const users = parsed.data.data.repository?.users; + if (!users) { + break; + 
} + + for (const user of users.nodes ?? []) { + if (user?.login) { + usersByLogin.set(user.login, user.avatarUrl ?? null); + } + } + + if (!users.pageInfo.hasNextPage || !users.pageInfo.endCursor) { + break; + } + + afterCursor = users.pageInfo.endCursor; + } + + return [...usersByLogin.entries()].map(([login, avatarUrl]) => ({ + login, + avatarUrl, + })); +} + +// --------------------------------------------------------------------------- +// Router +// --------------------------------------------------------------------------- + +export const createGithubExtendedRouter = () => + router({ + getPullRequestIdentityCandidates: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + kind: z.enum(["reviewer", "assignee"]), + pullRequestUrl: z.string().optional(), + }), + ) + .query(async ({ input }) => { + return getPullRequestIdentityCandidatesHelper(input); + }), + + getGitHubRepositoryOverview: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + }), + ) + .query(async ({ input }) => { + return getGitHubRepositoryOverviewForWorkspace(input.workspaceId); + }), + + createGitHubIssue: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + title: z.string().trim().min(1), + body: z.string().optional(), + assignees: z.array(z.string()).optional(), + labels: z.array(z.string()).optional(), + }), + ) + .mutation(async ({ input }) => { + return createGitHubIssueForWorkspace(input); + }), + + uploadGitHubIssueAsset: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + filename: z.string().trim().min(1), + contentBase64: z.string().trim().min(1), + mimeType: z.string().optional(), + }), + ) + .mutation(async ({ input }) => { + return uploadIssueAssetForWorkspace(input); + }), + + dispatchGitHubWorkflow: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + workflowId: z.number().int().positive(), + ref: z.string().optional(), + inputs: z.record(z.string(), z.string()).optional(), + }), + ) + .mutation(async 
({ input }) => { + return dispatchGitHubWorkflowForWorkspace(input); + }), + + getGitHubWorkflowRuns: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + workflowId: z.number().int().positive(), + }), + ) + .query(async ({ input }) => { + return getGitHubWorkflowRunsForWorkspace(input); + }), + + getWorkflowRunJobs: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + runId: z.number().int().positive(), + }), + ) + .query(async ({ input }) => { + return getWorkflowRunJobsForWorkspace(input); + }), + + rerunPullRequestChecks: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + mode: z.enum(["all", "failed"]), + }), + ) + .mutation(async ({ input }) => { + return rerunPullRequestChecksForWorkspace(input); + }), + + setPullRequestDraftState: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + isDraft: z.boolean(), + }), + ) + .mutation(async ({ input }) => { + const { repoPath, worktree, pullRequest } = + await getFreshPullRequestForWorkspace(input.workspaceId); + + const isCurrentlyDraft = pullRequest.state === "draft"; + if (pullRequest.state !== "draft" && pullRequest.state !== "open") { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "Only open or draft pull requests can be updated from Review.", + }); + } + + if (input.isDraft === isCurrentlyDraft) { + return { success: true }; + } + + const repoNameWithOwner = extractNwoFromUrl(pullRequest.url); + if (!repoNameWithOwner) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "Could not determine the pull request repository.", + }); + } + + const args = [ + "pr", + "ready", + String(pullRequest.number), + "--repo", + repoNameWithOwner, + ]; + if (input.isDraft) { + args.push("--undo"); + } + + await execWithShellEnv("gh", args, { cwd: repoPath }); + clearGitHubCachesForWorktree(repoPath); + + if (worktree) { + localDb + .update(worktrees) + .set({ githubStatus: null }) + .where(eq(worktrees.id, worktree.id)) + .run(); 
+ } + + return { success: true }; + }), + + setPullRequestThreadResolution: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + threadId: z.string().min(1), + isResolved: z.boolean(), + }), + ) + .mutation(async ({ input }) => { + const { repoPath, worktree } = resolveRepoPathForWorkspace( + input.workspaceId, + ); + const mutationName = input.isResolved + ? "resolveReviewThread" + : "unresolveReviewThread"; + const mutationQuery = `mutation ${mutationName}($threadId: ID!) { + ${mutationName}(input: { threadId: $threadId }) { + thread { + id + isResolved + } + } +}`; + + await execWithShellEnv( + "gh", + [ + "api", + "graphql", + "-f", + `query=${mutationQuery}`, + "-F", + `threadId=${input.threadId}`, + ], + { cwd: repoPath }, + ); + + clearGitHubCachesForWorktree(repoPath); + if (worktree) { + localDb + .update(worktrees) + .set({ githubStatus: null }) + .where(eq(worktrees.id, worktree.id)) + .run(); + } + + return { success: true }; + }), + + replyToPullRequestComment: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + body: z.string().trim().min(1), + threadId: z.string().min(1).optional(), + pullRequestNumber: z.number().int().positive().optional(), + pullRequestUrl: z.string().optional(), + }), + ) + .mutation(async ({ input }) => { + const { repoPath, worktree } = resolveRepoPathForWorkspace( + input.workspaceId, + ); + + if (input.threadId) { + await replyToReviewThread({ + worktreePath: repoPath, + threadId: input.threadId, + body: input.body, + }); + } else { + const githubStatus = await fetchGitHubPRStatus(repoPath); + const pullRequestNumber = + input.pullRequestNumber ?? githubStatus?.pr?.number; + if (!pullRequestNumber) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "No pull request found for this workspace.", + }); + } + + const prUrl = input.pullRequestUrl ?? githubStatus?.pr?.url; + const repoNameWithOwner = prUrl + ? extractNwoFromUrl(prUrl) + : githubStatus?.repoUrl + ? 
extractNwoFromUrl(githubStatus.repoUrl) + : null; + if (!repoNameWithOwner) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "Could not determine the repository for this pull request.", + }); + } + + await addPullRequestConversationComment({ + worktreePath: repoPath, + repoNameWithOwner, + pullRequestNumber, + body: input.body, + }); + } + + clearGitHubCachesForWorktree(repoPath); + if (worktree) { + localDb + .update(worktrees) + .set({ githubStatus: null }) + .where(eq(worktrees.id, worktree.id)) + .run(); + } + + return { success: true }; + }), + + updatePullRequestReviewers: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + add: z.array(z.string()).optional().default([]), + remove: z.array(z.string()).optional().default([]), + pullRequestNumber: z.number().int().positive().optional(), + pullRequestUrl: z.string().optional(), + }), + ) + .mutation(async ({ input }) => { + return updatePullRequestMembers({ + workspaceId: input.workspaceId, + kind: "reviewer", + add: input.add, + remove: input.remove, + pullRequestNumber: input.pullRequestNumber, + pullRequestUrl: input.pullRequestUrl, + }); + }), + + updatePullRequestAssignees: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + add: z.array(z.string()).optional().default([]), + remove: z.array(z.string()).optional().default([]), + pullRequestNumber: z.number().int().positive().optional(), + pullRequestUrl: z.string().optional(), + }), + ) + .mutation(async ({ input }) => { + return updatePullRequestMembers({ + workspaceId: input.workspaceId, + kind: "assignee", + add: input.add, + remove: input.remove, + pullRequestNumber: input.pullRequestNumber, + pullRequestUrl: input.pullRequestUrl, + }); + }), + + getMissingWorktrees: publicProcedure + .input(z.object({ projectId: z.string() })) + .query(({ input }) => { + const projectWorktrees = localDb + .select({ + id: worktrees.id, + path: worktrees.path, + branch: worktrees.branch, + }) + .from(worktrees) + 
.where(eq(worktrees.projectId, input.projectId)) + .all(); + + return projectWorktrees + .filter((wt) => !existsSync(wt.path)) + .map((wt) => ({ + worktreeId: wt.id, + path: wt.path, + branch: wt.branch, + })); + }), + + cleanupMissingWorktrees: publicProcedure + .input(z.object({ projectId: z.string() })) + .mutation(async ({ input }) => { + const projectWorktrees = localDb + .select() + .from(worktrees) + .where(eq(worktrees.projectId, input.projectId)) + .all(); + + const missing = projectWorktrees.filter((wt) => !existsSync(wt.path)); + + let removed = 0; + for (const wt of missing) { + const relatedWorkspaces = localDb + .select() + .from(workspaces) + .where(eq(workspaces.worktreeId, wt.id)) + .all(); + + // Tear down runtime state for each workspace before deleting DB rows. + // The worktree files are already gone (hence "missing"), so we skip + // disk teardown scripts but still kill terminals, unregister GitHub + // sync, and update active-workspace/project visibility so the app + // doesn't retain orphan state. + for (const ws of relatedWorkspaces) { + markWorkspaceAsDeleting(ws.id); + updateActiveWorkspaceIfRemoved(ws.id); + + try { + await getWorkspaceRuntimeRegistry() + .getForWorkspaceId(ws.id) + .terminal.killByWorkspaceId(ws.id); + } catch (err) { + console.warn( + `[cleanupMissingWorktrees] terminal kill failed for workspace ${ws.id}:`, + err, + ); + } + + githubSyncService.unregisterWorkspace(wt.path); + workspaceInitManager.clearJob(ws.id); + + deleteWorkspace(ws.id); + clearWorkspaceDeletingStatus(ws.id); + } + + deleteWorktreeRecord(wt.id); + hideProjectIfNoWorkspaces(input.projectId); + removed++; + } + + return { removed }; + }), + + getCheckJobSteps: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + detailsUrl: z.string(), + }), + ) + .query(async ({ input }) => { + const workspace = getWorkspace(input.workspaceId); + if (!workspace) { + return []; + } + + const worktree = workspace.worktreeId + ? 
getWorktree(workspace.worktreeId) + : null; + + let repoPath: string | null = worktree?.path ?? null; + if (!repoPath && workspace.type === "branch") { + const project = getProject(workspace.projectId); + repoPath = project?.mainRepoPath ?? null; + } + if (!repoPath) { + return []; + } + + return fetchCheckJobSteps(repoPath, input.detailsUrl); + }), + + getJobLogs: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + detailsUrl: z.string(), + }), + ) + .query(async ({ input }) => { + const workspace = getWorkspace(input.workspaceId); + if (!workspace) { + return { + jobStatus: "queued" as const, + jobConclusion: null, + steps: [], + }; + } + + const worktree = workspace.worktreeId + ? getWorktree(workspace.worktreeId) + : null; + + let repoPath: string | null = worktree?.path ?? null; + if (!repoPath && workspace.type === "branch") { + const project = getProject(workspace.projectId); + repoPath = project?.mainRepoPath ?? null; + } + if (!repoPath) { + return { + jobStatus: "queued" as const, + jobConclusion: null, + steps: [], + }; + } + + return fetchStructuredJobLogs(repoPath, input.detailsUrl); + }), + + getJobStatuses: publicProcedure + .input( + z.object({ + workspaceId: z.string(), + detailsUrls: z.array(z.string()), + }), + ) + .query(async ({ input }) => { + const workspace = getWorkspace(input.workspaceId); + if (!workspace) { + return []; + } + + const worktree = workspace.worktreeId + ? getWorktree(workspace.worktreeId) + : null; + + let repoPath: string | null = worktree?.path ?? null; + if (!repoPath && workspace.type === "branch") { + const project = getProject(workspace.projectId); + repoPath = project?.mainRepoPath ?? null; + } + if (!repoPath) { + return []; + } + + return fetchJobStatuses(repoPath, input.detailsUrls); + }), + + /** + * Notify the SyncService which workspace is currently active. + * Deactivates all other workspaces to stop their polling timers. + * Pass empty workspaceId to deactivate all (e.g., dashboard view). 
+ */ + setActiveSyncWorkspace: publicProcedure + .input(z.object({ workspaceId: z.string() })) + .mutation(({ input }) => { + if (!input.workspaceId) { + githubSyncService.deactivateAll(); + return { success: true }; + } + + const workspace = getWorkspace(input.workspaceId); + if (!workspace) return { success: false }; + + const worktree = workspace.worktreeId + ? getWorktree(workspace.worktreeId) + : null; + + let repoPath: string | null = worktree?.path ?? null; + if (!repoPath && workspace.type === "branch") { + const project = getProject(workspace.projectId); + repoPath = project?.mainRepoPath ?? null; + } + if (!repoPath) return { success: false }; + + githubSyncService.setActiveWorkspace(repoPath); + return { success: true }; + }), + }); + +export type GithubExtendedRouter = ReturnType< + typeof createGithubExtendedRouter +>; diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/delete.ts b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/delete.ts index fc4a0729296..edfd08d4691 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/delete.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/delete.ts @@ -27,6 +27,7 @@ import { listExternalWorktrees, worktreeExists, } from "../utils/git"; +import { githubSyncService } from "../utils/github/github-sync-service"; import { removeWorktreeFromDisk, runTeardown } from "../utils/teardown"; const normalizePath = (p: string): string => { @@ -325,6 +326,12 @@ export const createDeleteProcedures = () => { } } + // Stop SyncService polling for this workspace + const repoPath = worktree?.path ?? 
project?.mainRepoPath; + if (repoPath) { + githubSyncService.unregisterWorkspace(repoPath); + } + deleteWorkspace(input.id); if (worktree) { @@ -360,6 +367,19 @@ export const createDeleteProcedures = () => { .getForWorkspaceId(input.id) .terminal.killByWorkspaceId(input.id); + // Stop SyncService polling for this workspace + if (workspace.worktreeId) { + const wt = getWorktree(workspace.worktreeId); + if (wt?.path) { + githubSyncService.unregisterWorkspace(wt.path); + } + } else { + const proj = getProject(workspace.projectId); + if (proj?.mainRepoPath) { + githubSyncService.unregisterWorkspace(proj.mainRepoPath); + } + } + deleteWorkspace(input.id); hideProjectIfNoWorkspaces(workspace.projectId); updateActiveWorkspaceIfRemoved(input.id); diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/git-status.ts b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/git-status.ts index c3229e1dc1e..6157b85a103 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/git-status.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/git-status.ts @@ -22,17 +22,20 @@ import { clearGitHubCachesForWorktree, fetchGitHubPRComments, fetchGitHubPRStatus, + fetchGitHubPreviewUrl, type PullRequestCommentsTarget, - resolveReviewThread, } from "../utils/github"; +import { githubSyncService } from "../utils/github/github-sync-service"; import { getWorkspacePath } from "../utils/worktree"; const gitHubPRCommentsInputSchema = z.object({ workspaceId: z.string(), prNumber: z.number().int().positive().optional(), + prUrl: z.string().optional(), repoUrl: z.string().optional(), upstreamUrl: z.string().optional(), isFork: z.boolean().optional(), + forceFresh: z.boolean().optional(), }); function resolveCommentsPullRequestTarget({ @@ -60,6 +63,7 @@ function resolveCommentsPullRequestTarget({ return { prNumber, + prUrl: input.prUrl ?? 
githubStatus?.pr?.url, repoContext: { repoUrl, upstreamUrl, @@ -84,7 +88,7 @@ function hasMeaningfulGitHubStatusChange({ next, }: { current: GitHubStatus | null | undefined; - next: GitHubStatus; + next: GitHubStatus | null; }): boolean { return ( JSON.stringify(stripGitHubStatusTimestamp(current)) !== @@ -92,6 +96,13 @@ function hasMeaningfulGitHubStatusChange({ ); } +// Initialize the SyncService with fetch dependencies (idempotent) +githubSyncService.initialize({ + fetchPRStatus: fetchGitHubPRStatus, + fetchPRComments: ({ worktreePath, pullRequest }) => + fetchGitHubPRComments({ worktreePath, pullRequest }), +}); + export const createGitStatusProcedures = () => { return router({ refreshGitStatus: publicProcedure @@ -176,7 +187,13 @@ export const createGitStatusProcedures = () => { }), getGitHubStatus: publicProcedure - .input(z.object({ workspaceId: z.string() })) + .input( + z.object({ + workspaceId: z.string(), + forceFresh: z.boolean().optional(), + includePreview: z.boolean().optional(), + }), + ) .query(async ({ input }) => { const workspace = getWorkspace(input.workspaceId); if (!workspace) { @@ -188,29 +205,52 @@ export const createGitStatusProcedures = () => { return null; } + if (input.forceFresh) { + clearGitHubCachesForWorktree(repoPath); + } + + // Register workspace with SyncService for proactive cache warming + if (!githubSyncService.isRegistered(repoPath)) { + githubSyncService.registerWorkspace(repoPath); + } + const branchOverride = workspace.type === "branch" ? 
workspace.branch : null; const freshStatus = await fetchGitHubPRStatus(repoPath, branchOverride); - if (freshStatus && workspace.worktreeId) { - const worktree = getWorktree(workspace.worktreeId); - if ( - worktree && - hasMeaningfulGitHubStatusChange({ - current: worktree.githubStatus, - next: freshStatus, - }) - ) { - localDb - .update(worktrees) - .set({ githubStatus: freshStatus }) - .where(eq(worktrees.id, workspace.worktreeId)) - .run(); - } + const worktree = workspace.worktreeId + ? getWorktree(workspace.worktreeId) + : null; + + if ( + worktree && + hasMeaningfulGitHubStatusChange({ + current: worktree.githubStatus, + next: freshStatus, + }) + ) { + localDb + .update(worktrees) + .set({ githubStatus: freshStatus }) + .where(eq(worktrees.id, worktree.id)) + .run(); } - return freshStatus; + if (!input.includePreview || !freshStatus) { + return freshStatus; + } + + const previewUrl = await fetchGitHubPreviewUrl({ + worktreePath: repoPath, + githubStatus: freshStatus, + forceFresh: input.forceFresh, + }); + + return { + ...freshStatus, + previewUrl: previewUrl ?? undefined, + }; }), getGitHubPRComments: publicProcedure @@ -226,6 +266,10 @@ export const createGitStatusProcedures = () => { return []; } + if (input.forceFresh) { + clearGitHubCachesForWorktree(repoPath); + } + const worktree = workspace.worktreeId ? 
getWorktree(workspace.worktreeId) : null; @@ -241,36 +285,6 @@ export const createGitStatusProcedures = () => { }); }), - resolveReviewThread: publicProcedure - .input( - z.object({ - workspaceId: z.string(), - threadId: z.string(), - resolve: z.boolean(), - }), - ) - .mutation(async ({ input }) => { - const workspace = getWorkspace(input.workspaceId); - if (!workspace) { - throw new Error(`Workspace ${input.workspaceId} not found`); - } - - const repoPath = getWorkspacePath(workspace); - if (!repoPath) { - throw new Error( - `Could not resolve path for workspace ${input.workspaceId}`, - ); - } - - await resolveReviewThread({ - worktreePath: repoPath, - threadId: input.threadId, - resolve: input.resolve, - }); - - clearGitHubCachesForWorktree(repoPath); - }), - getWorktreeInfo: publicProcedure .input(z.object({ workspaceId: z.string() })) .query(({ input }) => { diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/query.ts b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/query.ts index 2bd22f3e28f..fb8eaf9489c 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/procedures/query.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/procedures/query.ts @@ -1,5 +1,6 @@ import { projects, + type SelectProject, workspaceSections, workspaces, worktrees, @@ -9,6 +10,7 @@ import { eq, isNotNull, isNull } from "drizzle-orm"; import { localDb } from "main/lib/local-db"; import { z } from "zod"; import { publicProcedure, router } from "../../.."; +import { fetchGitHubOwner } from "../../projects/utils/github"; import { getWorkspace } from "../utils/db-helpers"; import { getProjectChildItems } from "../utils/project-children-order"; import { loadSetupConfig } from "../utils/setup"; @@ -36,6 +38,30 @@ function getWorkspacesInVisualOrder(): string[] { return computeVisualOrder(activeProjects, allWorkspaces, allSections); } +async function ensureProjectHasGitHubOwner( + project: SelectProject, +): Promise { + if (project.githubOwner) { + return 
project; + } + + const githubOwner = await fetchGitHubOwner(project.mainRepoPath); + if (!githubOwner) { + return project; + } + + localDb + .update(projects) + .set({ githubOwner }) + .where(eq(projects.id, project.id)) + .run(); + + return { + ...project, + githubOwner, + }; +} + export const createQueryProcedures = () => { return router({ get: publicProcedure @@ -54,6 +80,9 @@ export const createQueryProcedures = () => { .from(projects) .where(eq(projects.id, workspace.projectId)) .get(); + const resolvedProject = project + ? await ensureProjectHasGitHubOwner(project) + : null; const worktree = workspace.worktreeId ? localDb .select() @@ -66,13 +95,13 @@ export const createQueryProcedures = () => { ...workspace, type: workspace.type as "worktree" | "branch", worktreePath: getWorkspacePath(workspace) ?? "", - project: project + project: resolvedProject ? { - id: project.id, - name: project.name, - mainRepoPath: project.mainRepoPath, - githubOwner: project.githubOwner ?? null, - defaultBranch: project.defaultBranch ?? null, + id: resolvedProject.id, + name: resolvedProject.name, + mainRepoPath: resolvedProject.mainRepoPath, + githubOwner: resolvedProject.githubOwner ?? null, + defaultBranch: resolvedProject.defaultBranch ?? 
null, } : null, worktree: worktree @@ -95,7 +124,7 @@ export const createQueryProcedures = () => { .sort((a, b) => a.tabOrder - b.tabOrder); }), - getAllGrouped: publicProcedure.query(() => { + getAllGrouped: publicProcedure.query(async () => { type WorkspaceItem = { id: string; projectId: string; @@ -135,6 +164,9 @@ export const createQueryProcedures = () => { .from(projects) .where(isNotNull(projects.tabOrder)) .all(); + const resolvedProjects = await Promise.all( + activeProjects.map((project) => ensureProjectHasGitHubOwner(project)), + ); const allWorktrees = localDb.select().from(worktrees).all(); const worktreePathMap: WorktreePathMap = new Map( @@ -166,7 +198,7 @@ export const createQueryProcedures = () => { } >(); - for (const project of activeProjects) { + for (const project of resolvedProjects) { const projectSections = allSections .filter((s) => s.projectId === project.id) .sort((a, b) => a.tabOrder - b.tabOrder) diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/base-branch-config.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/base-branch-config.ts index c70671e0692..65964d2c6c8 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/base-branch-config.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/base-branch-config.ts @@ -15,6 +15,10 @@ interface BranchBaseConfig { isExplicit: boolean; } +export interface BranchPullRequestBaseRepoConfig { + baseRepoUrl: string | null; +} + function parseBooleanConfig(value: string): boolean { const normalized = value.trim().toLowerCase(); return ( @@ -78,3 +82,36 @@ export async function unsetBranchBaseConfig({ .catch(() => {}), ]); } + +export async function getBranchPullRequestBaseRepoConfig({ + repoPath, + branch, +}: BranchConfigParams): Promise { + const git = await getSimpleGitWithShellPath(repoPath); + const baseRepoOutput = await git + .raw(["config", `branch.${branch}.pr-base-repo`]) + .catch(() => ""); + + return { + baseRepoUrl: baseRepoOutput.trim() || null, + }; 
+} + +export async function setBranchPullRequestBaseRepoConfig({ + repoPath, + branch, + baseRepoUrl, +}: BranchConfigParams & { baseRepoUrl: string }): Promise { + const git = await getSimpleGitWithShellPath(repoPath); + await git.raw(["config", `branch.${branch}.pr-base-repo`, baseRepoUrl]); +} + +export async function unsetBranchPullRequestBaseRepoConfig({ + repoPath, + branch, +}: BranchConfigParams): Promise { + const git = await getSimpleGitWithShellPath(repoPath); + await git + .raw(["config", "--unset", `branch.${branch}.pr-base-repo`]) + .catch(() => {}); +} diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/git-client.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/git-client.ts index 86417b35482..9ae719ba032 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/git-client.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/git-client.ts @@ -1,32 +1,144 @@ import { + type ExecFileOptions, type ExecFileOptionsWithStringEncoding, execFile, } from "node:child_process"; -import { promisify } from "node:util"; -import simpleGit, { type SimpleGit } from "simple-git"; +import { access } from "node:fs/promises"; +import { + buildSimpleGitUnsafeOptions, + type SimpleGitUnsafeOptions, +} from "@superset/shared/simple-git-unsafe"; +import simpleGit, { + type SimpleGit, + type SimpleGitProgressEvent, +} from "simple-git"; import { getProcessEnvWithShellPath } from "./shell-env"; -const execFileAsync = promisify(execFile); +/** + * Thrown when a git operation is requested against a path that no longer + * exists on disk (typically a worktree that was deleted externally while + * Superset's background polling kept firing). Caught by the tRPC Sentry + * middleware and NOT reported — this is an expected race, not a bug. + * + * Sentry dashboard ELECTRON-26 / ELECTRON-1Z were 5000+ occurrences of + * this case on Superset@1.4.7 before dedicated handling existed. 
+ */ +export class WorktreePathMissingError extends Error { + constructor(public readonly repoPath: string) { + super(`Worktree path no longer exists: ${repoPath}`); + this.name = "WorktreePathMissingError"; + } +} + +interface CreateSimpleGitWithShellPathOptions { + abort?: AbortSignal; + baseEnv?: NodeJS.ProcessEnv; + progress?: (event: SimpleGitProgressEvent) => void; + repoPath?: string; +} + +function createSimpleGitWithEnv( + env: Record, + options: Omit = {}, +): SimpleGit { + const unsafe = buildSimpleGitUnsafeOptions(env); + const gitOptions: { + abort?: AbortSignal; + baseDir?: string; + progress?: (event: SimpleGitProgressEvent) => void; + unsafe?: SimpleGitUnsafeOptions; + } = {}; + + if (options.abort) { + gitOptions.abort = options.abort; + } + if (options.progress) { + gitOptions.progress = options.progress; + } + if (options.repoPath) { + gitOptions.baseDir = options.repoPath; + } + if (unsafe) { + gitOptions.unsafe = unsafe; + } + + const git = + Object.keys(gitOptions).length > 0 + ? simpleGit(gitOptions as never) + : simpleGit(); + git.env(env); + return git; +} + +export async function createSimpleGitWithShellPath( + options: CreateSimpleGitWithShellPathOptions = {}, +): Promise { + if (options.repoPath) { + try { + await access(options.repoPath); + } catch { + // Surface a dedicated error so callers (and the Sentry middleware) + // can recognise the "worktree deleted externally" race and handle + // it gracefully instead of reporting an INTERNAL_SERVER_ERROR. + throw new WorktreePathMissingError(options.repoPath); + } + } + const env = await getProcessEnvWithShellPath(options.baseEnv ?? process.env); + return createSimpleGitWithEnv(env, options); +} export async function getSimpleGitWithShellPath( repoPath?: string, ): Promise { - const git = repoPath ? 
simpleGit(repoPath) : simpleGit(); - git.env(await getProcessEnvWithShellPath()); - return git; + return createSimpleGitWithShellPath({ repoPath }); } export async function execGitWithShellPath( args: string[], options?: Omit, ): Promise<{ stdout: string; stderr: string }> { + return execGitWithShellPathWithEncoding(args, { + ...options, + encoding: "utf8", + }); +} + +export async function execGitWithShellPathBuffer( + args: string[], + options?: Omit, +): Promise<{ stdout: Buffer; stderr: Buffer }> { + return execGitWithShellPathWithEncoding(args, { + ...options, + encoding: "buffer", + }); +} + +async function execGitWithShellPathWithEncoding< + TEncoding extends BufferEncoding | "buffer", +>( + args: string[], + options: + | (Omit & { encoding: TEncoding }) + | undefined, +): Promise<{ + stdout: TEncoding extends "buffer" ? Buffer : string; + stderr: TEncoding extends "buffer" ? Buffer : string; +}> { const env = await getProcessEnvWithShellPath( options?.env ? { ...process.env, ...options.env } : process.env, ); - return execFileAsync("git", args, { - ...options, - encoding: "utf8", - env, + return new Promise((resolve, reject) => { + execFile("git", args, { ...options, env }, (error, stdout, stderr) => { + if (error) { + reject(error); + return; + } + + resolve({ + stdout: stdout as TEncoding extends "buffer" ? Buffer : string, + stderr: stderr as TEncoding extends "buffer" ? 
Buffer : string, + }); + }); }); } diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/git.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/git.ts index dc1d834820d..f723ea00efa 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/git.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/git.ts @@ -12,7 +12,11 @@ import { import friendlyWords from "friendly-words"; import type { StatusResult } from "simple-git"; import { runWithPostCheckoutHookTolerance } from "../../utils/git-hook-tolerance"; -import { execGitWithShellPath, getSimpleGitWithShellPath } from "./git-client"; +import { + execGitWithShellPath, + getSimpleGitWithShellPath, + WorktreePathMissingError, +} from "./git-client"; import { execWithShellEnv, getProcessEnvWithShellPath } from "./shell-env"; import { resolveTrackingRemoteName } from "./upstream-ref"; @@ -187,6 +191,16 @@ export async function getStatusNoLock(repoPath: string): Promise { if (stderr.includes("not a git repository")) { throw new NotGitRepoError(repoPath); } + // Externally-deleted worktree: git prints + // `fatal: cannot change to '': No such file or directory` + // before exiting. `cannot change to` だけで判定する (特異的)。 + // NOTE: "No such file or directory" 単独での判定はしない。`-uall` + // スキャン中の untracked dir / submodule 欠落 / ephemeral unlink 等でも + // 同じ文言が出うるため、誤って「全体が消えた worktree」扱いにすると + // 上位の UI がファイル変更の差分表示を落としてしまう。 + if (stderr.includes("cannot change to")) { + throw new WorktreePathMissingError(repoPath); + } } throw new Error( `Failed to get git status: ${error instanceof Error ? 
error.message : String(error)}`, @@ -409,6 +423,22 @@ export async function getGitAuthorName( } } +export async function getGitAuthorEmail( + repoPath?: string, +): Promise { + try { + const git = await getSimpleGitWithShellPath(repoPath); + const email = await git.getConfig("user.email"); + return email.value?.trim() || null; + } catch (error) { + console.warn( + "[git/getGitAuthorEmail] Failed to read git user.email:", + error, + ); + return null; + } +} + let cachedGitHubUsername: { value: string | null; timestamp: number } | null = null; const GITHUB_USERNAME_CACHE_TTL = 5 * 60 * 1000; // 5 minutes diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.test.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.test.ts index a334ebf747a..dc9f25a1769 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.test.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.test.ts @@ -86,7 +86,7 @@ describe("getCachedGitHubStatusState", () => { try { setCachedGitHubStatus(worktreePath, status); - Date.now = () => 1000 + 10_001; + Date.now = () => 1000 + 30_001; expect(getCachedGitHubStatus(worktreePath)).toBeNull(); expect(getCachedGitHubStatusState(worktreePath)).toEqual({ diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.ts index f1712477e25..d9c1f756e1b 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/cache.ts @@ -4,15 +4,22 @@ import { type CacheState, createCachedResource, } from "./cached-resource"; +import { recordGitHubCacheMetric } from "./github-metrics"; import type { RepoContext } from "./types"; -const GITHUB_STATUS_CACHE_TTL_MS = 10_000; -const GITHUB_PR_COMMENTS_CACHE_TTL_MS = 30_000; +const GITHUB_STATUS_CACHE_TTL_MS = 30_000; +const GITHUB_PR_COMMENTS_CACHE_TTL_MS = 60_000; +const 
GITHUB_PREVIEW_URL_CACHE_TTL_MS = 10 * 60 * 1000; const GITHUB_REPO_CONTEXT_CACHE_TTL_MS = 300_000; +const GITHUB_COMMIT_AUTHOR_CACHE_TTL_MS = 300_000; +const GITHUB_NO_PR_MATCH_CACHE_TTL_MS = 120_000; const MAX_GITHUB_STATUS_CACHE_ENTRIES = 256; const MAX_GITHUB_PR_COMMENTS_CACHE_ENTRIES = 512; +const MAX_GITHUB_PREVIEW_URL_CACHE_ENTRIES = 512; const MAX_GITHUB_REPO_CONTEXT_CACHE_ENTRIES = 256; +const MAX_GITHUB_COMMIT_AUTHOR_CACHE_ENTRIES = 2048; +const MAX_GITHUB_NO_PR_MATCH_CACHE_ENTRIES = 512; const githubStatusResource = createCachedResource({ ttlMs: GITHUB_STATUS_CACHE_TTL_MS, @@ -24,15 +31,42 @@ const pullRequestCommentsResource = createCachedResource({ maxEntries: MAX_GITHUB_PR_COMMENTS_CACHE_ENTRIES, }); +const previewUrlResource = createCachedResource({ + ttlMs: GITHUB_PREVIEW_URL_CACHE_TTL_MS, + maxEntries: MAX_GITHUB_PREVIEW_URL_CACHE_ENTRIES, +}); + const repoContextResource = createCachedResource({ ttlMs: GITHUB_REPO_CONTEXT_CACHE_TTL_MS, maxEntries: MAX_GITHUB_REPO_CONTEXT_CACHE_ENTRIES, }); +const noPullRequestMatchResource = createCachedResource({ + ttlMs: GITHUB_NO_PR_MATCH_CACHE_TTL_MS, + maxEntries: MAX_GITHUB_NO_PR_MATCH_CACHE_ENTRIES, +}); + +export interface GitHubCommitAuthor { + login: string | null; + avatarUrl: string | null; +} + +const commitAuthorResource = createCachedResource({ + ttlMs: GITHUB_COMMIT_AUTHOR_CACHE_TTL_MS, + maxEntries: MAX_GITHUB_COMMIT_AUTHOR_CACHE_ENTRIES, +}); + export function getCachedGitHubStatus( worktreePath: string, ): GitHubStatus | null { - return githubStatusResource.get(worktreePath); + const cachedState = githubStatusResource.getState(worktreePath); + const cached = cachedState?.isFresh ? cachedState.value : null; + recordGitHubCacheMetric({ + kind: "status", + event: cachedState?.isFresh ? 
"fresh_hit" : "miss", + worktreePath, + }); + return cached; } export function getCachedGitHubStatusState( @@ -46,6 +80,11 @@ export function setCachedGitHubStatus( value: GitHubStatus, ): void { githubStatusResource.set(worktreePath, value); + recordGitHubCacheMetric({ + kind: "status", + event: "write", + worktreePath, + }); } export function readCachedGitHubStatus( @@ -53,9 +92,34 @@ export function readCachedGitHubStatus( load: () => Promise, options?: CachedResourceReadOptions, ): Promise { + const cached = githubStatusResource.getState(worktreePath); + recordGitHubCacheMetric({ + kind: "status", + event: options?.forceFresh + ? "force_fresh" + : cached?.isFresh + ? "fresh_hit" + : cached + ? "stale_hit" + : "miss", + worktreePath, + }); + return githubStatusResource.read(worktreePath, load, { ...options, - shouldCache: options?.shouldCache ?? ((value) => value !== null), + shouldCache: + options?.shouldCache ?? + ((value) => { + const shouldCache = value !== null; + if (shouldCache) { + recordGitHubCacheMetric({ + kind: "status", + event: "write", + worktreePath, + }); + } + return shouldCache; + }), }); } @@ -80,7 +144,14 @@ export function makePullRequestCommentsCacheKey({ export function getCachedPullRequestComments( cacheKey: string, ): PullRequestComment[] | null { - return pullRequestCommentsResource.get(cacheKey); + const cachedState = pullRequestCommentsResource.getState(cacheKey); + const cached = cachedState?.isFresh ? cachedState.value : null; + recordGitHubCacheMetric({ + kind: "comments", + event: cachedState?.isFresh ? 
"fresh_hit" : "miss", + worktreePath: extractWorktreePathFromCacheKey(cacheKey), + }); + return cached; } export function getCachedPullRequestCommentsState( @@ -94,6 +165,11 @@ export function setCachedPullRequestComments( value: PullRequestComment[], ): void { pullRequestCommentsResource.set(cacheKey, value); + recordGitHubCacheMetric({ + kind: "comments", + event: "write", + worktreePath: extractWorktreePathFromCacheKey(cacheKey), + }); } export function readCachedPullRequestComments( @@ -101,7 +177,129 @@ export function readCachedPullRequestComments( load: () => Promise, options?: CachedResourceReadOptions, ): Promise { - return pullRequestCommentsResource.read(cacheKey, load, options); + const worktreePath = extractWorktreePathFromCacheKey(cacheKey); + const cached = pullRequestCommentsResource.getState(cacheKey); + recordGitHubCacheMetric({ + kind: "comments", + event: options?.forceFresh + ? "force_fresh" + : cached?.isFresh + ? "fresh_hit" + : cached + ? "stale_hit" + : "miss", + worktreePath, + }); + + return pullRequestCommentsResource.read( + cacheKey, + async () => { + const value = await load(); + recordGitHubCacheMetric({ + kind: "comments", + event: "write", + worktreePath, + }); + return value; + }, + options, + ); +} + +export function makeGitHubPreviewCachePrefix(worktreePath: string): string { + return `${worktreePath}::preview::`; +} + +export function makeGitHubNoPullRequestCachePrefix( + worktreePath: string, +): string { + return `${worktreePath}::no-pr::`; +} + +export function makeGitHubNoPullRequestCacheKey({ + worktreePath, + localBranch, + headSha, +}: { + worktreePath: string; + localBranch: string; + headSha?: string; +}): string { + return `${makeGitHubNoPullRequestCachePrefix(worktreePath)}${localBranch}::${headSha ?? 
"no-head"}`; +} + +export function hasCachedNoPullRequestMatch(cacheKey: string): boolean { + return noPullRequestMatchResource.get(cacheKey) === true; +} + +export function setCachedNoPullRequestMatch(cacheKey: string): void { + noPullRequestMatchResource.set(cacheKey, true); +} + +export function clearCachedNoPullRequestMatch(cacheKey: string): void { + noPullRequestMatchResource.invalidate(cacheKey); +} + +export function makeGitHubPreviewCacheKey({ + worktreePath, + repoNameWithOwner, + branchName, + headSha, + pullRequestNumber, +}: { + worktreePath: string; + repoNameWithOwner: string; + branchName: string; + headSha?: string; + pullRequestNumber?: number | null; +}): string { + return `${makeGitHubPreviewCachePrefix(worktreePath)}${repoNameWithOwner}::${branchName}::${headSha ?? "no-head"}::pr-${pullRequestNumber ?? "none"}`; +} + +export function getCachedGitHubPreviewUrl(cacheKey: string): string | null { + const cachedState = previewUrlResource.getState(cacheKey); + const cached = cachedState?.isFresh ? cachedState.value : null; + recordGitHubCacheMetric({ + kind: "preview", + event: cachedState?.isFresh ? "fresh_hit" : "miss", + worktreePath: extractWorktreePathFromCacheKey(cacheKey), + }); + return cached; +} + +export function readCachedGitHubPreviewUrl( + cacheKey: string, + load: () => Promise, + options?: CachedResourceReadOptions, +): Promise { + const worktreePath = extractWorktreePathFromCacheKey(cacheKey); + const cached = previewUrlResource.getState(cacheKey); + recordGitHubCacheMetric({ + kind: "preview", + event: options?.forceFresh + ? "force_fresh" + : cached?.isFresh + ? "fresh_hit" + : cached + ? "stale_hit" + : "miss", + worktreePath, + }); + + return previewUrlResource.read(cacheKey, load, { + ...options, + // Cache misses too so preview-less branches don't repeatedly hit deployments. + shouldCache: + options?.shouldCache ?? 
+ (() => { + recordGitHubCacheMetric({ + kind: "preview", + event: "write", + worktreePath, + }); + return true; + }), + }); } export function getCachedRepoContext(worktreePath: string): RepoContext | null { @@ -132,10 +330,64 @@ export function readCachedRepoContext( }); } +export function makeGitHubCommitAuthorCacheKey({ + repoNameWithOwner, + commitHash, +}: { + repoNameWithOwner: string; + commitHash: string; +}): string { + return `${repoNameWithOwner}#${commitHash}`; +} + +export function readCachedGitHubCommitAuthor( + cacheKey: string, + load: () => Promise, + options?: CachedResourceReadOptions, +): Promise { + return commitAuthorResource.read(cacheKey, load, options); +} + export function clearGitHubCachesForWorktree(worktreePath: string): void { githubStatusResource.invalidatePrefix(worktreePath); repoContextResource.invalidate(worktreePath); + recordGitHubCacheMetric({ + kind: "status", + event: "invalidate", + worktreePath, + }); + previewUrlResource.invalidatePrefix( + makeGitHubPreviewCachePrefix(worktreePath), + ); + recordGitHubCacheMetric({ + kind: "preview", + event: "invalidate", + worktreePath, + }); pullRequestCommentsResource.invalidatePrefix( makePullRequestCommentsCachePrefix(worktreePath), ); + noPullRequestMatchResource.invalidatePrefix( + makeGitHubNoPullRequestCachePrefix(worktreePath), + ); + recordGitHubCacheMetric({ + kind: "comments", + event: "invalidate", + worktreePath, + }); +} + +function extractWorktreePathFromCacheKey(cacheKey: string): string | null { + const commentsSeparator = "::comments::"; + const previewSeparator = "::preview::"; + + if (cacheKey.includes(commentsSeparator)) { + return cacheKey.split(commentsSeparator)[0] || null; + } + + if (cacheKey.includes(previewSeparator)) { + return cacheKey.split(previewSeparator)[0] || null; + } + + return cacheKey || null; } diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/comments.ts 
b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/comments.ts index c4b7a7538b4..a3451fb1452 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/comments.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/comments.ts @@ -1,6 +1,7 @@ import type { PullRequestComment } from "@superset/local-db"; import type { z } from "zod"; import { execWithShellEnv } from "../shell-env"; +import { trackGitHubOperation } from "./github-metrics"; import { GHIssueCommentSchema, type GHReviewThreadCommentSchema, @@ -100,6 +101,149 @@ function sortPullRequestComments( return comments.sort((a, b) => (b.createdAt ?? 0) - (a.createdAt ?? 0)); } +const RESOLVE_REVIEW_THREAD_MUTATION = ` +mutation ResolveReviewThread($threadId: ID!) { + resolveReviewThread(input: {threadId: $threadId}) { + thread { + id + isResolved + } + } +} +`; + +const UNRESOLVE_REVIEW_THREAD_MUTATION = ` +mutation UnresolveReviewThread($threadId: ID!) { + unresolveReviewThread(input: {threadId: $threadId}) { + thread { + id + isResolved + } + } +} +`; + +export async function resolveReviewThread({ + worktreePath, + threadId, + resolve, +}: { + worktreePath: string; + threadId: string; + resolve: boolean; +}): Promise { + const mutation = resolve + ? 
RESOLVE_REVIEW_THREAD_MUTATION + : UNRESOLVE_REVIEW_THREAD_MUTATION; + + const { stdout } = await trackGitHubOperation({ + name: "gh_graphql_resolve_review_thread", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + [ + "api", + "graphql", + "-f", + `query=${mutation}`, + "-F", + `threadId=${threadId}`, + ], + { cwd: worktreePath }, + ), + }); + + const json = JSON.parse(stdout.trim()); + if (Array.isArray(json.errors) && json.errors.length > 0) { + const msg = json.errors + .map((e: { message?: string }) => e.message) + .join("; "); + throw new Error(msg || "GraphQL mutation failed"); + } +} + +const ADD_REVIEW_THREAD_REPLY_MUTATION = ` +mutation AddPullRequestReviewThreadReply($threadId: ID!, $body: String!) { + addPullRequestReviewThreadReply(input: {pullRequestReviewThreadId: $threadId, body: $body}) { + comment { + id + } + } +} +`; + +export async function replyToReviewThread({ + worktreePath, + threadId, + body, +}: { + worktreePath: string; + threadId: string; + body: string; +}): Promise { + const { stdout } = await trackGitHubOperation({ + name: "gh_graphql_reply_review_thread", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + [ + "api", + "graphql", + "-f", + `query=${ADD_REVIEW_THREAD_REPLY_MUTATION}`, + "-F", + `threadId=${threadId}`, + "-f", + `body=${body}`, + ], + { cwd: worktreePath }, + ), + }); + + const json = JSON.parse(stdout.trim()); + if (Array.isArray(json.errors) && json.errors.length > 0) { + const msg = json.errors + .map((e: { message?: string }) => e.message) + .join("; "); + throw new Error(msg || "GraphQL mutation failed"); + } +} + +export async function addPullRequestConversationComment({ + worktreePath, + repoNameWithOwner, + pullRequestNumber, + body, +}: { + worktreePath: string; + repoNameWithOwner: string; + pullRequestNumber: number; + body: string; +}): Promise { + await trackGitHubOperation({ + name: "gh_api_add_issue_comment", + category: "gh", + worktreePath, + fn: () => 
+ execWithShellEnv( + "gh", + [ + "api", + "--method", + "POST", + `repos/${repoNameWithOwner}/issues/${pullRequestNumber}/comments`, + "-f", + `body=${body}`, + ], + { cwd: worktreePath }, + ), + }); +} + function getReviewThreadCommentId( comment: ReviewThreadCommentNode, ): string | null { @@ -135,10 +279,10 @@ function parseReviewThreadCommentNode({ createdAt: parseTimestamp(comment.createdAt), url: comment.url, kind: "review" as const, + threadId, path: comment.path, line: comment.line ?? comment.originalLine ?? undefined, isResolved, - ...(threadId ? { threadId } : {}), }; } @@ -207,7 +351,7 @@ export function parseReviewThreadCommentsResponse( return parseReviewThreadCommentsConnection({ comments: result.data.comments, isResolved: result.data.isResolved === true, - threadId: result.data.id, + threadId: result.data.id ?? undefined, }); }), ); @@ -261,65 +405,19 @@ export function mergePullRequestComments( return sortPullRequestComments([...commentsById.values()]); } -const RESOLVE_REVIEW_THREAD_MUTATION = ` -mutation ResolveReviewThread($threadId: ID!) { - resolveReviewThread(input: {threadId: $threadId}) { - thread { - id - isResolved - } - } -} -`; - -const UNRESOLVE_REVIEW_THREAD_MUTATION = ` -mutation UnresolveReviewThread($threadId: ID!) { - unresolveReviewThread(input: {threadId: $threadId}) { - thread { - id - isResolved - } - } -} -`; - -export async function resolveReviewThread({ - worktreePath, - threadId, - resolve, -}: { - worktreePath: string; - threadId: string; - resolve: boolean; -}): Promise { - const mutation = resolve - ? 
RESOLVE_REVIEW_THREAD_MUTATION - : UNRESOLVE_REVIEW_THREAD_MUTATION; - - const { stdout } = await execWithShellEnv( - "gh", - ["api", "graphql", "-f", `query=${mutation}`, "-F", `threadId=${threadId}`], - { cwd: worktreePath }, - ); - - const json = JSON.parse(stdout.trim()); - if (Array.isArray(json.errors) && json.errors.length > 0) { - const msg = json.errors - .map((e: { message?: string }) => e.message) - .join("; "); - throw new Error(msg || "GraphQL mutation failed"); - } -} - async function fetchPaginatedCommentsEndpoint( worktreePath: string, endpoint: string, ): Promise { - const { stdout } = await execWithShellEnv( - "gh", - ["api", "--paginate", "--slurp", endpoint], - { cwd: worktreePath }, - ); + const { stdout } = await trackGitHubOperation({ + name: "gh_api_issue_comments_paginated", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv("gh", ["api", "--paginate", "--slurp", endpoint], { + cwd: worktreePath, + }), + }); return parsePaginatedApiArray(stdout); } @@ -362,20 +460,26 @@ async function fetchAdditionalReviewThreadCommentsForThread({ while (afterCursor) { let stdout: string; try { - const result = await execWithShellEnv( - "gh", - [ - "api", - "graphql", - "-f", - `query=${REVIEW_THREAD_COMMENTS_QUERY}`, - "-F", - `threadId=${threadId}`, - "-F", - `after=${afterCursor}`, - ], - { cwd: worktreePath }, - ); + const result = await trackGitHubOperation({ + name: "gh_graphql_review_thread_comments_page", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + [ + "api", + "graphql", + "-f", + `query=${REVIEW_THREAD_COMMENTS_QUERY}`, + "-F", + `threadId=${threadId}`, + "-F", + `after=${afterCursor}`, + ], + { cwd: worktreePath }, + ), + }); stdout = result.stdout; } catch (error) { console.warn( @@ -461,8 +565,14 @@ async function fetchReviewThreadCommentsForPullRequest( let stdout: string; try { - const result = await execWithShellEnv("gh", args, { - cwd: worktreePath, + const result = await trackGitHubOperation({ 
+ name: "gh_graphql_review_threads", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv("gh", args, { + cwd: worktreePath, + }), }); stdout = result.stdout; } catch (error) { @@ -511,7 +621,7 @@ async function fetchReviewThreadCommentsForPullRequest( ...parseReviewThreadCommentsConnection({ comments: thread.comments, isResolved, - threadId: thread.id, + threadId: thread.id ?? undefined, }), ); diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-metrics.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-metrics.ts new file mode 100644 index 00000000000..a49f78bbb3a --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-metrics.ts @@ -0,0 +1,574 @@ +const ROLLING_WINDOW_MS = 5 * 60 * 1000; +const MAX_RECENT_OPERATION_EVENTS = 2000; +const MAX_RECENT_CACHE_EVENTS = 2000; +const MAX_LAST_ERRORS = 20; + +export type GitHubMetricOperationCategory = "sync" | "gh"; +export type GitHubCacheMetricKind = "status" | "comments" | "preview"; +export type GitHubCacheMetricEvent = + | "fresh_hit" + | "stale_hit" + | "miss" + | "force_fresh" + | "write" + | "invalidate"; + +interface OperationEvent { + timestamp: number; + name: string; + category: GitHubMetricOperationCategory; + success: boolean; + rateLimited: boolean; + durationMs: number; + worktreePath: string | null; + errorMessage: string | null; +} + +interface CacheEvent { + timestamp: number; + kind: GitHubCacheMetricKind; + event: GitHubCacheMetricEvent; + worktreePath: string | null; +} + +interface OperationAggregateWorkspace { + calls: number; + successes: number; + failures: number; + rateLimited: number; + lastRunAt: number | null; +} + +interface OperationAggregate { + name: string; + category: GitHubMetricOperationCategory; + calls: number; + successes: number; + failures: number; + rateLimited: number; + totalDurationMs: number; + maxDurationMs: number; + lastDurationMs: number | null; + lastRunAt: number | null; + 
lastErrorAt: number | null; + lastErrorMessage: string | null; + workspaces: Map; +} + +interface CacheAggregate { + kind: GitHubCacheMetricKind; + freshHits: number; + staleHits: number; + misses: number; + forceFresh: number; + writes: number; + invalidations: number; +} + +interface LastErrorEntry { + at: number; + operation: string; + category: GitHubMetricOperationCategory; + message: string; + worktreePath: string | null; +} + +export interface GitHubOperationWorkspaceBreakdown { + worktreePath: string; + sessionCalls: number; + rolling5mCalls: number; + lastRunAt: number | null; +} + +export interface GitHubOperationMetricSnapshot { + name: string; + category: GitHubMetricOperationCategory; + session: { + calls: number; + successes: number; + failures: number; + rateLimited: number; + avgDurationMs: number; + maxDurationMs: number; + }; + rolling5m: { + calls: number; + successes: number; + failures: number; + rateLimited: number; + avgDurationMs: number; + maxDurationMs: number; + }; + lastRunAt: number | null; + lastDurationMs: number | null; + lastErrorAt: number | null; + lastErrorMessage: string | null; + workspaces: GitHubOperationWorkspaceBreakdown[]; +} + +export interface GitHubCacheMetricSnapshot { + kind: GitHubCacheMetricKind; + session: CacheAggregateCounts; + rolling5m: CacheAggregateCounts; +} + +interface CacheAggregateCounts { + freshHits: number; + staleHits: number; + misses: number; + forceFresh: number; + writes: number; + invalidations: number; +} + +export interface GitHubMetricsSnapshot { + sessionStartedAt: number; + generatedAt: number; + totals: { + sessionCallCount: number; + sessionFailureCount: number; + rolling5mCallCount: number; + rolling5mFailureCount: number; + rolling5mRateLimitedCount: number; + }; + operations: GitHubOperationMetricSnapshot[]; + caches: GitHubCacheMetricSnapshot[]; + lastErrors: LastErrorEntry[]; +} + +const sessionStartedAt = Date.now(); +const operationAggregates = new Map(); +const cacheAggregates = 
new Map(); +const recentOperationEvents: OperationEvent[] = []; +const recentCacheEvents: CacheEvent[] = []; +const lastErrors: LastErrorEntry[] = []; + +function trimRecentEvents(now: number): void { + const operationCutoff = now - ROLLING_WINDOW_MS; + while ( + recentOperationEvents.length > 0 && + (recentOperationEvents.length > MAX_RECENT_OPERATION_EVENTS || + recentOperationEvents[0]?.timestamp < operationCutoff) + ) { + recentOperationEvents.shift(); + } + + while ( + recentCacheEvents.length > 0 && + (recentCacheEvents.length > MAX_RECENT_CACHE_EVENTS || + recentCacheEvents[0]?.timestamp < operationCutoff) + ) { + recentCacheEvents.shift(); + } +} + +function getOperationAggregateKey( + name: string, + category: GitHubMetricOperationCategory, +): string { + return `${category}:${name}`; +} + +function getOrCreateOperationAggregate({ + name, + category, +}: { + name: string; + category: GitHubMetricOperationCategory; +}): OperationAggregate { + const key = getOperationAggregateKey(name, category); + const existing = operationAggregates.get(key); + if (existing) { + return existing; + } + + const aggregate: OperationAggregate = { + name, + category, + calls: 0, + successes: 0, + failures: 0, + rateLimited: 0, + totalDurationMs: 0, + maxDurationMs: 0, + lastDurationMs: null, + lastRunAt: null, + lastErrorAt: null, + lastErrorMessage: null, + workspaces: new Map(), + }; + operationAggregates.set(key, aggregate); + return aggregate; +} + +function getOrCreateCacheAggregate( + kind: GitHubCacheMetricKind, +): CacheAggregate { + const existing = cacheAggregates.get(kind); + if (existing) { + return existing; + } + + const aggregate: CacheAggregate = { + kind, + freshHits: 0, + staleHits: 0, + misses: 0, + forceFresh: 0, + writes: 0, + invalidations: 0, + }; + cacheAggregates.set(kind, aggregate); + return aggregate; +} + +function normalizeErrorMessage(error: unknown): string | null { + if (error instanceof Error) { + return error.message.slice(0, 300); + } + + if 
(typeof error === "string") { + return error.slice(0, 300); + } + + return null; +} + +function recordLastError(entry: LastErrorEntry): void { + lastErrors.push(entry); + if (lastErrors.length > MAX_LAST_ERRORS) { + lastErrors.shift(); + } +} + +export function trackGitHubOperationEvent({ + name, + category, + worktreePath = null, + success, + durationMs, + rateLimited = false, + error, +}: { + name: string; + category: GitHubMetricOperationCategory; + worktreePath?: string | null; + success: boolean; + durationMs: number; + rateLimited?: boolean; + error?: unknown; +}): void { + const now = Date.now(); + trimRecentEvents(now); + + const errorMessage = success ? null : normalizeErrorMessage(error); + const aggregate = getOrCreateOperationAggregate({ name, category }); + aggregate.calls += 1; + aggregate.successes += success ? 1 : 0; + aggregate.failures += success ? 0 : 1; + aggregate.rateLimited += rateLimited ? 1 : 0; + aggregate.totalDurationMs += durationMs; + aggregate.maxDurationMs = Math.max(aggregate.maxDurationMs, durationMs); + aggregate.lastDurationMs = durationMs; + aggregate.lastRunAt = now; + + if (errorMessage) { + aggregate.lastErrorAt = now; + aggregate.lastErrorMessage = errorMessage; + recordLastError({ + at: now, + operation: name, + category, + message: errorMessage, + worktreePath, + }); + } + + if (worktreePath) { + const workspaceAggregate = aggregate.workspaces.get(worktreePath) ?? { + calls: 0, + successes: 0, + failures: 0, + rateLimited: 0, + lastRunAt: null, + }; + workspaceAggregate.calls += 1; + workspaceAggregate.successes += success ? 1 : 0; + workspaceAggregate.failures += success ? 0 : 1; + workspaceAggregate.rateLimited += rateLimited ? 
1 : 0; + workspaceAggregate.lastRunAt = now; + aggregate.workspaces.set(worktreePath, workspaceAggregate); + } + + recentOperationEvents.push({ + timestamp: now, + name, + category, + success, + rateLimited, + durationMs, + worktreePath, + errorMessage, + }); +} + +export async function trackGitHubOperation({ + name, + category, + worktreePath = null, + fn, +}: { + name: string; + category: GitHubMetricOperationCategory; + worktreePath?: string | null; + fn: () => Promise; +}): Promise { + const startedAt = Date.now(); + try { + const result = await fn(); + trackGitHubOperationEvent({ + name, + category, + worktreePath, + success: true, + durationMs: Date.now() - startedAt, + }); + return result; + } catch (error) { + trackGitHubOperationEvent({ + name, + category, + worktreePath, + success: false, + durationMs: Date.now() - startedAt, + error, + }); + throw error; + } +} + +export function recordGitHubCacheMetric({ + kind, + event, + worktreePath = null, +}: { + kind: GitHubCacheMetricKind; + event: GitHubCacheMetricEvent; + worktreePath?: string | null; +}): void { + const now = Date.now(); + trimRecentEvents(now); + + const aggregate = getOrCreateCacheAggregate(kind); + switch (event) { + case "fresh_hit": + aggregate.freshHits += 1; + break; + case "stale_hit": + aggregate.staleHits += 1; + break; + case "miss": + aggregate.misses += 1; + break; + case "force_fresh": + aggregate.forceFresh += 1; + break; + case "write": + aggregate.writes += 1; + break; + case "invalidate": + aggregate.invalidations += 1; + break; + } + + recentCacheEvents.push({ + timestamp: now, + kind, + event, + worktreePath, + }); +} + +export function getGitHubMetricsSnapshot(): GitHubMetricsSnapshot { + const now = Date.now(); + trimRecentEvents(now); + const rollingOperationCutoff = now - ROLLING_WINDOW_MS; + const recentOperations = recentOperationEvents.filter( + (event) => event.timestamp >= rollingOperationCutoff, + ); + const recentCaches = recentCacheEvents.filter( + (event) => 
event.timestamp >= rollingOperationCutoff, + ); + + const operations = [...operationAggregates.values()] + .map((aggregate) => { + const rolling = recentOperations.filter( + (event) => + event.name === aggregate.name && + event.category === aggregate.category, + ); + const rollingWorkspaceMap = new Map< + string, + { calls: number; lastRunAt: number | null } + >(); + + for (const event of rolling) { + if (!event.worktreePath) { + continue; + } + const workspaceEntry = rollingWorkspaceMap.get(event.worktreePath) ?? { + calls: 0, + lastRunAt: null, + }; + workspaceEntry.calls += 1; + workspaceEntry.lastRunAt = event.timestamp; + rollingWorkspaceMap.set(event.worktreePath, workspaceEntry); + } + + const rollingTotalDurationMs = rolling.reduce( + (total, event) => total + event.durationMs, + 0, + ); + + const workspaces = [...aggregate.workspaces.entries()] + .map(([worktreePath, workspaceAggregate]) => ({ + worktreePath, + sessionCalls: workspaceAggregate.calls, + rolling5mCalls: rollingWorkspaceMap.get(worktreePath)?.calls ?? 0, + lastRunAt: + rollingWorkspaceMap.get(worktreePath)?.lastRunAt ?? + workspaceAggregate.lastRunAt, + })) + .sort((left, right) => right.sessionCalls - left.sessionCalls); + + return { + name: aggregate.name, + category: aggregate.category, + session: { + calls: aggregate.calls, + successes: aggregate.successes, + failures: aggregate.failures, + rateLimited: aggregate.rateLimited, + avgDurationMs: + aggregate.calls > 0 + ? aggregate.totalDurationMs / aggregate.calls + : 0, + maxDurationMs: aggregate.maxDurationMs, + }, + rolling5m: { + calls: rolling.length, + successes: rolling.filter((event) => event.success).length, + failures: rolling.filter((event) => !event.success).length, + rateLimited: rolling.filter((event) => event.rateLimited).length, + avgDurationMs: + rolling.length > 0 ? 
rollingTotalDurationMs / rolling.length : 0, + maxDurationMs: rolling.reduce( + (max, event) => Math.max(max, event.durationMs), + 0, + ), + }, + lastRunAt: aggregate.lastRunAt, + lastDurationMs: aggregate.lastDurationMs, + lastErrorAt: aggregate.lastErrorAt, + lastErrorMessage: aggregate.lastErrorMessage, + workspaces, + }; + }) + .sort((left, right) => { + if (right.rolling5m.calls !== left.rolling5m.calls) { + return right.rolling5m.calls - left.rolling5m.calls; + } + return right.session.calls - left.session.calls; + }); + + const caches: GitHubCacheMetricSnapshot[] = ( + ["status", "comments", "preview"] as const + ).map((kind) => { + const session = cacheAggregates.get(kind) ?? { + kind, + freshHits: 0, + staleHits: 0, + misses: 0, + forceFresh: 0, + writes: 0, + invalidations: 0, + }; + const rolling = recentCaches.filter((event) => event.kind === kind); + const rollingCounts = rolling.reduce( + (counts, event) => { + switch (event.event) { + case "fresh_hit": + counts.freshHits += 1; + break; + case "stale_hit": + counts.staleHits += 1; + break; + case "miss": + counts.misses += 1; + break; + case "force_fresh": + counts.forceFresh += 1; + break; + case "write": + counts.writes += 1; + break; + case "invalidate": + counts.invalidations += 1; + break; + } + return counts; + }, + { + freshHits: 0, + staleHits: 0, + misses: 0, + forceFresh: 0, + writes: 0, + invalidations: 0, + }, + ); + + return { + kind, + session: { + freshHits: session.freshHits, + staleHits: session.staleHits, + misses: session.misses, + forceFresh: session.forceFresh, + writes: session.writes, + invalidations: session.invalidations, + }, + rolling5m: rollingCounts, + }; + }); + + return { + sessionStartedAt, + generatedAt: now, + totals: { + sessionCallCount: operations.reduce( + (total, operation) => total + operation.session.calls, + 0, + ), + sessionFailureCount: operations.reduce( + (total, operation) => total + operation.session.failures, + 0, + ), + rolling5mCallCount: 
operations.reduce( + (total, operation) => total + operation.rolling5m.calls, + 0, + ), + rolling5mFailureCount: operations.reduce( + (total, operation) => total + operation.rolling5m.failures, + 0, + ), + rolling5mRateLimitedCount: operations.reduce( + (total, operation) => total + operation.rolling5m.rateLimited, + 0, + ), + }, + operations, + caches, + lastErrors: [...lastErrors].reverse(), + }; +} diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-rate-limiter.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-rate-limiter.ts new file mode 100644 index 00000000000..e42078ff0f0 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-rate-limiter.ts @@ -0,0 +1,79 @@ +/** + * Centralized GitHub API rate limiter. + * + * Detects HTTP 403 (secondary rate limit) errors from `gh` CLI commands + * and pauses ALL GitHub API calls with exponential backoff until the + * rate limit window resets. + */ + +const INITIAL_BACKOFF_MS = 30_000; +const MAX_BACKOFF_MS = 300_000; +const BACKOFF_MULTIPLIER = 2; + +let pausedUntil = 0; +let currentBackoffMs = INITIAL_BACKOFF_MS; +let consecutiveFailures = 0; + +export interface GitHubRateLimitState { + isRateLimited: boolean; + resumeAt: number | null; + currentBackoffMs: number; + consecutiveFailures: number; +} + +export function isRateLimited(): boolean { + return Date.now() < pausedUntil; +} + +export function getRateLimitResumeTime(): number { + return pausedUntil; +} + +export function getGitHubRateLimitState(): GitHubRateLimitState { + return { + isRateLimited: isRateLimited(), + resumeAt: pausedUntil > 0 ? 
pausedUntil : null, + currentBackoffMs, + consecutiveFailures, + }; +} + +export function onRateLimitHit(): void { + consecutiveFailures++; + currentBackoffMs = Math.min( + INITIAL_BACKOFF_MS * BACKOFF_MULTIPLIER ** (consecutiveFailures - 1), + MAX_BACKOFF_MS, + ); + pausedUntil = Date.now() + currentBackoffMs; + console.warn( + `[GitHub] Rate limit hit. Pausing all API calls for ${currentBackoffMs / 1000}s (attempt ${consecutiveFailures})`, + ); +} + +export function onRateLimitSuccess(): void { + if (consecutiveFailures > 0) { + consecutiveFailures = 0; + currentBackoffMs = INITIAL_BACKOFF_MS; + console.log("[GitHub] Rate limit recovered. Resuming normal operations."); + } +} + +export function isSecondaryRateLimitError(error: unknown): boolean { + if (!(error instanceof Error)) return false; + + const message = (error.message || "").toLowerCase(); + const stdout = + "stdout" in error && typeof error.stdout === "string" + ? error.stdout.toLowerCase() + : ""; + const stderr = + "stderr" in error && typeof error.stderr === "string" + ? error.stderr.toLowerCase() + : ""; + + const haystack = `${message} ${stdout} ${stderr}`; + return ( + haystack.includes("secondary rate limit") || + haystack.includes("exceeded a secondary rate limit") + ); +} diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-sync-service.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-sync-service.ts new file mode 100644 index 00000000000..1d2d6112e61 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github-sync-service.ts @@ -0,0 +1,489 @@ +/** + * GitHubSyncService — centralized GitHub API polling for all workspaces. + * + * Instead of each UI surface independently polling the GitHub API, this + * service runs per-workspace timers that proactively keep the backend + * cache warm. Frontend tRPC queries read from the always-warm cache + * without triggering additional API calls. 
+ * + * Only the **active** workspace is polled. When the user switches to a + * different workspace, the previous one is deactivated (timers stopped) + * and the new one is activated (timers started). + * + * Intervals: + * - PR status: 30 seconds by default, 15 seconds while checks are pending + * - PR comments: 60 seconds for the currently attached PR only + * + * Rate limiting is handled by rateLimitedRefresh() in github.ts — the + * SyncService does NOT call onRateLimitHit/Success directly to avoid + * double-counting with the lower-level wrapper. + * + * --- FORK NOTE --- + * This service is a fork-specific replacement for upstream's frontend + * hover-debounce approach (useHoverGitHubStatus, commit be22b46dd, #3125). + * Upstream fetches GitHub data on-demand from the frontend; this fork + * centralizes polling in the backend for better API call efficiency. + * See also: githubQueryPolicy.ts for the frontend cache-reading strategy. + */ + +import type { GitHubStatus, PullRequestComment } from "@superset/local-db"; +import type { PullRequestCommentsTarget } from "./github"; +import { isRateLimited } from "./github-rate-limiter"; + +export const SYNC_PR_STATUS_INTERVAL_MS = 30_000; +export const SYNC_PR_STATUS_PENDING_INTERVAL_MS = 15_000; +export const SYNC_PR_COMMENTS_INTERVAL_MS = 60_000; + +type FetchPRStatusFn = (worktreePath: string) => Promise; +type FetchPRCommentsFn = (params: { + worktreePath: string; + pullRequest?: PullRequestCommentsTarget | null; +}) => Promise; + +interface WorkspaceSyncState { + worktreePath: string; + prStatusTimer: ReturnType | null; + prCommentsTimer: ReturnType | null; + nextPRStatusSyncAt: number | null; + nextPRCommentsSyncAt: number | null; + isActive: boolean; + prStatusInFlight: boolean; + prCommentsInFlight: boolean; + latestStatus: GitHubStatus | null; + lastPRStatusSuccessAt: number | null; + lastPRStatusErrorAt: number | null; + lastPRStatusErrorMessage: string | null; + lastPRCommentsSuccessAt: number | null; + 
lastPRCommentsErrorAt: number | null; + lastPRCommentsErrorMessage: string | null; +} + +interface SyncServiceDeps { + fetchPRStatus: FetchPRStatusFn; + fetchPRComments: FetchPRCommentsFn; + onPRStatusUpdate?: ( + worktreePath: string, + status: GitHubStatus | null, + ) => void; +} + +export interface GitHubSyncWorkspaceDebugSnapshot { + worktreePath: string; + isActive: boolean; + prStatusInFlight: boolean; + prCommentsInFlight: boolean; + nextPRStatusSyncAt: number | null; + nextPRCommentsSyncAt: number | null; + prStatusIntervalMs: number; + prCommentsIntervalMs: number | null; + lastPRStatusSuccessAt: number | null; + lastPRStatusErrorAt: number | null; + lastPRStatusErrorMessage: string | null; + lastPRCommentsSuccessAt: number | null; + lastPRCommentsErrorAt: number | null; + lastPRCommentsErrorMessage: string | null; + latestStatus: { + hasPr: boolean; + prNumber: number | null; + checksStatus: NonNullable["checksStatus"] | null; + repoUrl: string | null; + branchExistsOnRemote: boolean; + lastRefreshed: number | null; + }; +} + +export interface GitHubSyncServiceDebugSnapshot { + registeredWorkspaceCount: number; + activeWorkspaceCount: number; + activeWorktreePaths: string[]; + workspaces: GitHubSyncWorkspaceDebugSnapshot[]; +} + +class GitHubSyncServiceImpl { + private workspaces = new Map(); + private deps: SyncServiceDeps | null = null; + + initialize(deps: SyncServiceDeps): void { + this.deps = deps; + } + + /** + * Register a workspace WITHOUT starting polling timers. + * The workspace is registered as inactive — call activateWorkspace() + * or setActiveWorkspace() to start polling. + * + * This prevents the "all workspaces poll until setActiveWorkspace + * arrives" race condition at startup. 
+ */ + registerWorkspace(worktreePath: string): void { + if (this.workspaces.has(worktreePath)) { + return; + } + + const state: WorkspaceSyncState = { + worktreePath, + prStatusTimer: null, + prCommentsTimer: null, + nextPRStatusSyncAt: null, + nextPRCommentsSyncAt: null, + isActive: false, + prStatusInFlight: false, + prCommentsInFlight: false, + latestStatus: null, + lastPRStatusSuccessAt: null, + lastPRStatusErrorAt: null, + lastPRStatusErrorMessage: null, + lastPRCommentsSuccessAt: null, + lastPRCommentsErrorAt: null, + lastPRCommentsErrorMessage: null, + }; + + this.workspaces.set(worktreePath, state); + } + + /** + * Unregister a workspace completely (e.g., workspace deleted). + * Stops timers and removes from the registry. + */ + unregisterWorkspace(worktreePath: string): void { + const state = this.workspaces.get(worktreePath); + if (!state) return; + + this.stopTimers(state); + state.isActive = false; + this.workspaces.delete(worktreePath); + } + + /** + * Activate a workspace, starting its polling timers and triggering + * an immediate sync. If not yet registered, registers it first. + */ + activateWorkspace(worktreePath: string): void { + let state = this.workspaces.get(worktreePath); + + if (!state) { + this.registerWorkspace(worktreePath); + const registeredState = this.workspaces.get(worktreePath); + if (!registeredState) { + return; + } + state = registeredState; + } + + if (state.isActive) return; + + state.isActive = true; + this.stopTimers(state); + void this.primeWorkspace(worktreePath); + } + + /** + * Deactivate a workspace, pausing its polling timers. + * The workspace remains in the registry and can be reactivated. + */ + deactivateWorkspace(worktreePath: string): void { + const state = this.workspaces.get(worktreePath); + if (!state || !state.isActive) return; + + state.isActive = false; + this.stopTimers(state); + } + + /** + * Deactivate all workspaces except the given one. + * Activates the given workspace if not already active. 
+ * Pass null to deactivate all workspaces (e.g., navigating away from workspaces). + */ + setActiveWorkspace(worktreePath: string | null): void { + for (const state of this.workspaces.values()) { + if (worktreePath && state.worktreePath === worktreePath) { + if (!state.isActive) { + state.isActive = true; + this.stopTimers(state); + void this.primeWorkspace(state.worktreePath); + } + } else if (state.isActive) { + state.isActive = false; + this.stopTimers(state); + } + } + + // Register and activate if not yet known + if (worktreePath && !this.workspaces.has(worktreePath)) { + this.registerWorkspace(worktreePath); + this.activateWorkspace(worktreePath); + } + } + + /** + * Deactivate all workspaces. Used when navigating away from workspace views. + */ + deactivateAll(): void { + for (const state of this.workspaces.values()) { + if (state.isActive) { + state.isActive = false; + this.stopTimers(state); + } + } + } + + /** + * Trigger an immediate refresh for a workspace. + * Used after user mutations (merge, reviewer add, etc.) + * to provide instant feedback. + */ + async invalidate( + worktreePath: string, + scope: "all" | "prStatus" | "prComments" = "all", + ): Promise { + if (!this.deps) return; + + if (scope === "all" || scope === "prStatus") { + await this.syncPRStatus(worktreePath); + } + if (scope === "all" || scope === "prComments") { + await this.syncPRComments(worktreePath); + } + } + + /** + * Clean up all timers (e.g., on app quit). 
+ */ + destroy(): void { + for (const state of this.workspaces.values()) { + this.stopTimers(state); + state.isActive = false; + } + this.workspaces.clear(); + } + + isRegistered(worktreePath: string): boolean { + return this.workspaces.has(worktreePath); + } + + getDebugSnapshot(): GitHubSyncServiceDebugSnapshot { + const workspaces = [...this.workspaces.values()].map((state) => ({ + worktreePath: state.worktreePath, + isActive: state.isActive, + prStatusInFlight: state.prStatusInFlight, + prCommentsInFlight: state.prCommentsInFlight, + nextPRStatusSyncAt: state.nextPRStatusSyncAt, + nextPRCommentsSyncAt: state.nextPRCommentsSyncAt, + prStatusIntervalMs: this.getPRStatusInterval(state), + prCommentsIntervalMs: getPullRequestCommentsTargetFromStatus( + state.latestStatus, + ) + ? SYNC_PR_COMMENTS_INTERVAL_MS + : null, + lastPRStatusSuccessAt: state.lastPRStatusSuccessAt, + lastPRStatusErrorAt: state.lastPRStatusErrorAt, + lastPRStatusErrorMessage: state.lastPRStatusErrorMessage, + lastPRCommentsSuccessAt: state.lastPRCommentsSuccessAt, + lastPRCommentsErrorAt: state.lastPRCommentsErrorAt, + lastPRCommentsErrorMessage: state.lastPRCommentsErrorMessage, + latestStatus: { + hasPr: Boolean(state.latestStatus?.pr), + prNumber: state.latestStatus?.pr?.number ?? null, + checksStatus: state.latestStatus?.pr?.checksStatus ?? null, + repoUrl: state.latestStatus?.repoUrl ?? null, + branchExistsOnRemote: state.latestStatus?.branchExistsOnRemote ?? false, + lastRefreshed: state.latestStatus?.lastRefreshed ?? 
null, + }, + })); + + return { + registeredWorkspaceCount: workspaces.length, + activeWorkspaceCount: workspaces.filter((workspace) => workspace.isActive) + .length, + activeWorktreePaths: workspaces + .filter((workspace) => workspace.isActive) + .map((workspace) => workspace.worktreePath), + workspaces, + }; + } + + private async primeWorkspace(worktreePath: string): Promise { + await this.syncPRStatus(worktreePath); + } + + private stopTimers(state: WorkspaceSyncState): void { + if (state.prStatusTimer) { + clearTimeout(state.prStatusTimer); + state.prStatusTimer = null; + } + state.nextPRStatusSyncAt = null; + if (state.prCommentsTimer) { + clearTimeout(state.prCommentsTimer); + state.prCommentsTimer = null; + } + state.nextPRCommentsSyncAt = null; + } + + private getPRStatusInterval(state: WorkspaceSyncState): number { + return state.latestStatus?.pr?.checksStatus === "pending" + ? SYNC_PR_STATUS_PENDING_INTERVAL_MS + : SYNC_PR_STATUS_INTERVAL_MS; + } + + private scheduleNextPRStatusSync(state: WorkspaceSyncState): void { + if (!state.isActive) { + return; + } + + if (state.prStatusTimer) { + clearTimeout(state.prStatusTimer); + } + + const intervalMs = this.getPRStatusInterval(state); + state.nextPRStatusSyncAt = Date.now() + intervalMs; + state.prStatusTimer = setTimeout(() => { + void this.syncPRStatus(state.worktreePath); + }, intervalMs); + } + + private scheduleNextPRCommentsSync(state: WorkspaceSyncState): void { + if (state.prCommentsTimer) { + clearTimeout(state.prCommentsTimer); + state.prCommentsTimer = null; + } + state.nextPRCommentsSyncAt = null; + + if ( + !state.isActive || + !getPullRequestCommentsTargetFromStatus(state.latestStatus) + ) { + return; + } + + state.nextPRCommentsSyncAt = Date.now() + SYNC_PR_COMMENTS_INTERVAL_MS; + state.prCommentsTimer = setTimeout(() => { + void this.syncPRComments(state.worktreePath); + }, SYNC_PR_COMMENTS_INTERVAL_MS); + } + + private async syncPRStatus(worktreePath: string): Promise { + const state = 
this.workspaces.get(worktreePath); + if (!this.deps || !state) return; + if (state.prStatusTimer) { + clearTimeout(state.prStatusTimer); + state.prStatusTimer = null; + } + state.nextPRStatusSyncAt = null; + if (isRateLimited() || state.prStatusInFlight) { + if (state.isActive && !state.prStatusInFlight) { + this.scheduleNextPRStatusSync(state); + } + return; + } + state.prStatusInFlight = true; + + const previousCommentsTargetKey = getPullRequestCommentsTargetKey( + state.latestStatus, + ); + + try { + const status = await this.deps.fetchPRStatus(worktreePath); + if (!this.workspaces.has(worktreePath)) return; + state.latestStatus = status; + state.lastPRStatusSuccessAt = Date.now(); + state.lastPRStatusErrorAt = null; + state.lastPRStatusErrorMessage = null; + this.deps.onPRStatusUpdate?.(worktreePath, status); + + const nextCommentsTargetKey = getPullRequestCommentsTargetKey(status); + if ( + previousCommentsTargetKey !== nextCommentsTargetKey && + !state.prCommentsInFlight + ) { + this.scheduleNextPRCommentsSync(state); + } + } catch (error) { + console.warn("[GitHub SyncService] PR status sync failed:", error); + state.lastPRStatusErrorAt = Date.now(); + state.lastPRStatusErrorMessage = + error instanceof Error ? 
error.message : String(error); + this.scheduleNextPRCommentsSync(state); + } finally { + const current = this.workspaces.get(worktreePath); + if (current) { + current.prStatusInFlight = false; + this.scheduleNextPRStatusSync(current); + } + } + } + + private async syncPRComments(worktreePath: string): Promise { + const state = this.workspaces.get(worktreePath); + if (!this.deps || !state) return; + if (state.prCommentsTimer) { + clearTimeout(state.prCommentsTimer); + state.prCommentsTimer = null; + } + state.nextPRCommentsSyncAt = null; + if (isRateLimited() || state.prCommentsInFlight) { + if (state.isActive && !state.prCommentsInFlight) { + this.scheduleNextPRCommentsSync(state); + } + return; + } + + const pullRequest = getPullRequestCommentsTargetFromStatus( + state.latestStatus, + ); + if (!pullRequest) { + return; + } + + state.prCommentsInFlight = true; + + try { + await this.deps.fetchPRComments({ worktreePath, pullRequest }); + if (!this.workspaces.has(worktreePath)) return; + state.lastPRCommentsSuccessAt = Date.now(); + state.lastPRCommentsErrorAt = null; + state.lastPRCommentsErrorMessage = null; + } catch (error) { + console.warn("[GitHub SyncService] PR comments sync failed:", error); + state.lastPRCommentsErrorAt = Date.now(); + state.lastPRCommentsErrorMessage = + error instanceof Error ? error.message : String(error); + } finally { + const current = this.workspaces.get(worktreePath); + if (current) { + current.prCommentsInFlight = false; + this.scheduleNextPRCommentsSync(current); + } + } + } +} + +function getPullRequestCommentsTargetFromStatus( + status: GitHubStatus | null, +): PullRequestCommentsTarget | null { + if (!status?.pr) { + return null; + } + + return { + prNumber: status.pr.number, + repoContext: { + repoUrl: status.repoUrl, + upstreamUrl: status.upstreamUrl ?? status.repoUrl, + isFork: status.isFork ?? 
false, + }, + prUrl: status.pr.url, + }; +} + +function getPullRequestCommentsTargetKey( + status: GitHubStatus | null, +): string | null { + const target = getPullRequestCommentsTargetFromStatus(status); + if (!target) { + return null; + } + + return `${target.repoContext.repoUrl}::${target.repoContext.upstreamUrl}::${target.prNumber}::${target.prUrl ?? ""}`; +} + +export const githubSyncService = new GitHubSyncServiceImpl(); diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.test.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.test.ts index a8794a67daa..9fa17c01459 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.test.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.test.ts @@ -6,6 +6,10 @@ import { parseReviewThreadCommentsResponse, } from "./comments"; import { resolveRemoteBranchNameForGitHubStatus } from "./github"; +import { + canAttachPullRequestToWorkspace, + resolveOpenPullRequestPushTarget, +} from "./pr-attachment"; import { branchMatchesPR, getPRHeadBranchCandidates, @@ -74,14 +78,103 @@ describe("getPullRequestRepoArgs", () => { }); }); +describe("pull request attachment", () => { + test("attaches same-repo open PRs using the fallback remote", () => { + expect( + resolveOpenPullRequestPushTarget({ + pr: { + headRefName: "feature/my-thing", + isCrossRepository: false, + state: "open", + }, + remotes: [ + { + name: "origin", + fetchUrl: "git@github.com:superset-sh/superset.git", + }, + ], + fallbackRemote: "origin", + }), + ).toEqual({ + remote: "origin", + targetBranch: "feature/my-thing", + }); + }); + + test("does not attach cross-repo open PRs when the fork remote is missing", () => { + expect( + canAttachPullRequestToWorkspace({ + pr: { + headRefName: "feature/my-thing", + headRepositoryOwner: "forkowner", + headRepositoryName: "superset", + isCrossRepository: true, + state: "open", + }, + remotes: [ + { + name: "origin", + fetchUrl: 
"git@github.com:superset-sh/superset.git", + }, + ], + fallbackRemote: "origin", + }), + ).toBe(false); + }); + + test("attaches cross-repo open PRs when the fork remote exists", () => { + expect( + resolveOpenPullRequestPushTarget({ + pr: { + headRefName: "feature/my-thing", + headRepositoryOwner: "forkowner", + headRepositoryName: "superset", + isCrossRepository: true, + state: "draft", + }, + remotes: [ + { + name: "origin", + fetchUrl: "git@github.com:superset-sh/superset.git", + }, + { + name: "forkowner", + fetchUrl: "git@github.com:forkowner/superset.git", + }, + ], + fallbackRemote: "origin", + }), + ).toEqual({ + remote: "forkowner", + targetBranch: "feature/my-thing", + }); + }); + + test("keeps historical PRs attached even without a fork remote", () => { + expect( + canAttachPullRequestToWorkspace({ + pr: { + headRefName: "feature/my-thing", + headRepositoryOwner: "forkowner", + headRepositoryName: "superset", + isCrossRepository: true, + state: "merged", + }, + remotes: [], + fallbackRemote: "origin", + }), + ).toBe(true); + }); +}); + describe("shouldRefreshCachedRepoContext", () => { - test("returns false when no cached repo context exists", () => { + test("returns true when no cached repo context exists", () => { expect( shouldRefreshCachedRepoContext({ originUrl: "https://github.com/superset-sh/superset", cachedRepoContext: null, }), - ).toBe(false); + ).toBe(true); }); test("returns false when the cached repo still matches origin", () => { diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.ts index e4291c632a2..150d39bb620 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/github.ts @@ -4,35 +4,73 @@ import { getCurrentBranch, isUnbornHeadError, } from "../git"; -import { execGitWithShellPath } from "../git-client"; +import { execGitWithShellPath, 
getSimpleGitWithShellPath } from "../git-client"; import { execWithShellEnv } from "../shell-env"; import { parseUpstreamRef } from "../upstream-ref"; import { clearGitHubCachesForWorktree, + getCachedGitHubPreviewUrl, + getCachedGitHubStatus, + getCachedGitHubStatusState, getCachedPullRequestCommentsState, + makeGitHubPreviewCacheKey, makePullRequestCommentsCacheKey, + readCachedGitHubPreviewUrl, readCachedGitHubStatus, readCachedPullRequestComments, } from "./cache"; -import { fetchPullRequestComments, resolveReviewThread } from "./comments"; +import { + addPullRequestConversationComment, + fetchPullRequestComments, + replyToReviewThread, + resolveReviewThread, +} from "./comments"; +import { + trackGitHubOperation, + trackGitHubOperationEvent, +} from "./github-metrics"; +import { + isRateLimited, + isSecondaryRateLimitError, + onRateLimitHit, + onRateLimitSuccess, +} from "./github-rate-limiter"; +import { + canAttachPullRequestToWorkspace, + type GitRemoteInfo, +} from "./pr-attachment"; import { getPRForBranch } from "./pr-resolution"; import { extractNwoFromUrl, getRepoContext } from "./repo-context"; import { GHDeploymentSchema, GHDeploymentStatusSchema, + GHJobResponseSchema, type RepoContext, } from "./types"; export interface PullRequestCommentsTarget { prNumber: number; repoContext: Pick; + prUrl?: string | null; } -export { clearGitHubCachesForWorktree, resolveReviewThread }; +export { + addPullRequestConversationComment, + clearGitHubCachesForWorktree, + replyToReviewThread, + resolveReviewThread, +}; function getPullRequestCommentsRepoNameWithOwner( target: PullRequestCommentsTarget, ): string | null { + const prRepoNameWithOwner = target.prUrl + ? extractNwoFromUrl(target.prUrl) + : null; + if (prRepoNameWithOwner) { + return prRepoNameWithOwner; + } + const targetUrl = target.repoContext.isFork ? 
target.repoContext.upstreamUrl : target.repoContext.repoUrl; @@ -40,41 +78,34 @@ function getPullRequestCommentsRepoNameWithOwner( return extractNwoFromUrl(targetUrl); } -async function resolvePullRequestCommentsTarget( +async function getGitRemoteInfos( worktreePath: string, - branchOverride?: string | null, -): Promise { - const repoContext = await getRepoContext(worktreePath); - if (!repoContext) { - return null; - } - - const branchName = - branchOverride?.trim() || (await getCurrentBranch(worktreePath)); - if (!branchName) { - return null; - } - - const revParseTarget = branchOverride ? `refs/heads/${branchName}` : "HEAD"; - const shaResult = await execGitWithShellPath(["rev-parse", revParseTarget], { - cwd: worktreePath, - }).catch((error) => { - if (isUnbornHeadError(error)) { - return { stdout: "", stderr: "" }; - } - if (branchOverride) { - return { stdout: "", stderr: "" }; - } - throw error; - }); - const headSha = shaResult.stdout.trim() || undefined; +): Promise { + const git = await getSimpleGitWithShellPath(worktreePath); + const remotes = await git.getRemotes(true); + return remotes.map((remote) => ({ + name: remote.name, + fetchUrl: remote.refs.fetch, + pushUrl: remote.refs.push, + })); +} - if (branchOverride && !headSha) { - return null; - } +async function resolveAttachedPullRequest({ + worktreePath, + localBranch, + repoContext, + headSha, + fallbackRemote, +}: { + worktreePath: string; + localBranch: string; + repoContext: RepoContext; + headSha?: string; + fallbackRemote: string; +}): Promise { const prInfo = await getPRForBranch( worktreePath, - branchName, + localBranch, repoContext, headSha, ); @@ -82,9 +113,33 @@ async function resolvePullRequestCommentsTarget( return null; } + const remotes = await getGitRemoteInfos(worktreePath); + return canAttachPullRequestToWorkspace({ + pr: prInfo, + remotes, + fallbackRemote, + }) + ? 
prInfo + : null; +} + +async function resolvePullRequestCommentsTarget( + worktreePath: string, + branchName?: string | null, +): Promise { + const githubStatus = await fetchGitHubPRStatus(worktreePath, branchName); + if (!githubStatus?.pr) { + return null; + } + return { - prNumber: prInfo.number, - repoContext, + prNumber: githubStatus.pr.number, + repoContext: { + repoUrl: githubStatus.repoUrl, + upstreamUrl: githubStatus.upstreamUrl ?? githubStatus.repoUrl, + isFork: githubStatus.isFork ?? false, + }, + prUrl: githubStatus.pr.url, }; } @@ -100,106 +155,95 @@ export function resolveRemoteBranchNameForGitHubStatus({ return upstreamBranchName?.trim() || prHeadRefName?.trim() || localBranchName; } +interface ResolvedGitHubStatusContext { + repoContext: RepoContext; + branchName: string; + headSha?: string; + trackingRemote: string; + previewBranchName: string; + parsedUpstreamBranchName?: string | null; +} + +async function resolveGitHubStatusContext( + worktreePath: string, +): Promise { + const repoContext = await getRepoContext(worktreePath); + if (!repoContext) { + return null; + } + + const branchName = await getCurrentBranch(worktreePath); + if (!branchName) { + return null; + } + + const [shaResult, upstreamResult] = await Promise.all([ + execGitWithShellPath(["rev-parse", "HEAD"], { + cwd: worktreePath, + }).catch((error) => { + if (isUnbornHeadError(error)) { + return { stdout: "", stderr: "" }; + } + throw error; + }), + execGitWithShellPath(["rev-parse", "--abbrev-ref", "@{upstream}"], { + cwd: worktreePath, + }).catch(() => ({ stdout: "", stderr: "" })), + ]); + + const headSha = shaResult.stdout.trim() || undefined; + const parsedUpstreamRef = parseUpstreamRef(upstreamResult.stdout.trim()); + + return { + repoContext, + branchName, + headSha, + trackingRemote: parsedUpstreamRef?.remoteName ?? 
"origin", + previewBranchName: resolveRemoteBranchNameForGitHubStatus({ + localBranchName: branchName, + upstreamBranchName: parsedUpstreamRef?.branchName, + }), + parsedUpstreamBranchName: parsedUpstreamRef?.branchName, + }; +} + async function refreshGitHubPRStatus( worktreePath: string, - branchOverride?: string | null, ): Promise { try { - const repoContext = await getRepoContext(worktreePath); - if (!repoContext) { + const context = await resolveGitHubStatusContext(worktreePath); + if (!context) { return null; } - const branchName = - branchOverride?.trim() || (await getCurrentBranch(worktreePath)); - if (!branchName) { - return null; - } - - const revParseTarget = branchOverride ? `refs/heads/${branchName}` : "HEAD"; - const upstreamTarget = branchOverride - ? `${branchName}@{upstream}` - : "@{upstream}"; - - const [shaResult, upstreamResult] = await Promise.all([ - execGitWithShellPath(["rev-parse", revParseTarget], { - cwd: worktreePath, - }).catch((error) => { - if (isUnbornHeadError(error)) { - return { stdout: "", stderr: "" }; - } - if (branchOverride) { - return { stdout: "", stderr: "" }; - } - throw error; - }), - execGitWithShellPath(["rev-parse", "--abbrev-ref", upstreamTarget], { - cwd: worktreePath, - }).catch(() => ({ stdout: "", stderr: "" })), - ]); - const headSha = shaResult.stdout.trim() || undefined; - - // When using a branch override, we must have a valid SHA to avoid - // getPRForBranch falling back to HEAD (which is a different branch). - if (branchOverride && !headSha) { - return null; - } - - const parsedUpstreamRef = parseUpstreamRef(upstreamResult.stdout.trim()); - const trackingRemote = parsedUpstreamRef?.remoteName ?? 
"origin"; - const previewBranchName = resolveRemoteBranchNameForGitHubStatus({ - localBranchName: branchName, - upstreamBranchName: parsedUpstreamRef?.branchName, + const prInfo = await resolveAttachedPullRequest({ + worktreePath, + localBranch: context.branchName, + repoContext: context.repoContext, + headSha: context.headSha, + fallbackRemote: context.trackingRemote, }); - const [prInfo, previewUrl] = await Promise.all([ - getPRForBranch(worktreePath, branchName, repoContext, headSha), - fetchPreviewDeploymentUrl( - worktreePath, - headSha, - previewBranchName, - repoContext, - ), - ]); - const remoteBranchName = resolveRemoteBranchNameForGitHubStatus({ - localBranchName: branchName, - upstreamBranchName: parsedUpstreamRef?.branchName, + localBranchName: context.branchName, + upstreamBranchName: context.parsedUpstreamBranchName, prHeadRefName: prInfo?.headRefName, }); const branchCheck = await branchExistsOnRemote( worktreePath, remoteBranchName, - trackingRemote, + context.trackingRemote, ); - let finalPreviewUrl = previewUrl; - if (!finalPreviewUrl && prInfo?.number) { - const targetUrl = repoContext.isFork - ? repoContext.upstreamUrl - : repoContext.repoUrl; - const nwo = extractNwoFromUrl(targetUrl); - if (nwo) { - finalPreviewUrl = await queryDeploymentUrl( - worktreePath, - nwo, - `ref=${encodeURIComponent(`refs/pull/${prInfo.number}/merge`)}`, - ); - } - } - - const result: GitHubStatus = { + return { pr: prInfo, - repoUrl: repoContext.repoUrl, - upstreamUrl: repoContext.upstreamUrl, - isFork: repoContext.isFork, + repoUrl: context.repoContext.repoUrl, + upstreamUrl: context.repoContext.upstreamUrl, + isFork: context.repoContext.isFork, branchExistsOnRemote: branchCheck.status === "exists", - previewUrl: finalPreviewUrl, lastRefreshed: Date.now(), }; - - return result; } catch { return null; } @@ -225,18 +269,60 @@ async function refreshGitHubPRComments({ * Fetches GitHub PR status for a worktree or branch workspace using the `gh` CLI. 
* Returns null if `gh` is not installed, not authenticated, or on error. * - * @param branchName - Optional branch name override. When provided (for branch - * workspaces), resolves the SHA and upstream for that branch instead of using - * HEAD / the checked-out branch. Also used to scope the cache key. + * @param branchName - Optional branch name override. Used **only** to scope the + * cache key so multiple branch workspaces sharing a main-repo path do not + * cross-contaminate each other's PR status. The inner `refreshGitHubPRStatus` + * call still resolves SHA/upstream from the repo's currently checked-out + * branch — fully propagating the override inside the refresh path is out of + * scope for this PR because the fork's PR attachment / resolution helpers + * (`resolveGitHubStatusContext`, `resolveAttachedPullRequest`) differ from + * upstream and need a separate rework. Tracked as follow-up work. */ export async function fetchGitHubPRStatus( worktreePath: string, branchName?: string | null, ): Promise { const cacheKey = branchName ? `${worktreePath}::${branchName}` : worktreePath; - return readCachedGitHubStatus(cacheKey, () => - refreshGitHubPRStatus(worktreePath, branchName), - ); + if (isRateLimited()) { + // When rate limited, return stale cache or null — never throw, + // and never overwrite stale cache with null + const cached = getCachedGitHubStatus(cacheKey); + trackGitHubOperationEvent({ + name: "status_refresh", + category: "sync", + worktreePath, + success: cached !== null || getCachedGitHubStatusState(cacheKey) !== null, + durationMs: 0, + rateLimited: true, + error: + cached === null && getCachedGitHubStatusState(cacheKey) === null + ? 
"Rate limited without cached status" + : undefined, + }); + return cached; + } + return trackGitHubOperation({ + name: "status_refresh", + category: "sync", + worktreePath, + fn: () => + readCachedGitHubStatus(cacheKey, () => + rateLimitedRefresh(() => refreshGitHubPRStatus(worktreePath)), + ), + }); +} + +async function rateLimitedRefresh(fn: () => Promise): Promise { + try { + const result = await fn(); + onRateLimitSuccess(); + return result; + } catch (error) { + if (isSecondaryRateLimitError(error)) { + onRateLimitHit(); + } + throw error; + } } export async function fetchGitHubPRComments({ @@ -248,50 +334,138 @@ export async function fetchGitHubPRComments({ pullRequest?: PullRequestCommentsTarget | null; branchName?: string | null; }): Promise { + if (isRateLimited()) { + trackGitHubOperationEvent({ + name: "comments_refresh", + category: "sync", + worktreePath, + success: true, + durationMs: 0, + rateLimited: true, + }); + return []; + } try { - const pullRequestTarget = - pullRequest ?? - (await resolvePullRequestCommentsTarget(worktreePath, branchName)); - if (!pullRequestTarget) { - return []; - } + return await trackGitHubOperation({ + name: "comments_refresh", + category: "sync", + worktreePath, + fn: async () => { + const pullRequestTarget = + pullRequest ?? 
+ (await resolvePullRequestCommentsTarget(worktreePath, branchName)); + if (!pullRequestTarget) { + return []; + } - const repoNameWithOwner = - getPullRequestCommentsRepoNameWithOwner(pullRequestTarget); - if (!repoNameWithOwner) { - return []; - } + const repoNameWithOwner = + getPullRequestCommentsRepoNameWithOwner(pullRequestTarget); + if (!repoNameWithOwner) { + return []; + } - const cacheKey = makePullRequestCommentsCacheKey({ - worktreePath, - repoNameWithOwner, - pullRequestNumber: pullRequestTarget.prNumber, - }); - try { - return await readCachedPullRequestComments(cacheKey, () => - refreshGitHubPRComments({ + const cacheKey = makePullRequestCommentsCacheKey({ worktreePath, repoNameWithOwner, pullRequestNumber: pullRequestTarget.prNumber, - }), - ); - } catch (error) { - const cached = getCachedPullRequestCommentsState(cacheKey); - if (cached) { - console.warn( - "[GitHub] Failed to refresh pull request comments; using cached value:", - error, - ); - return cached.value; - } - - throw error; - } + }); + try { + return await readCachedPullRequestComments(cacheKey, () => + rateLimitedRefresh(() => + refreshGitHubPRComments({ + worktreePath, + repoNameWithOwner, + pullRequestNumber: pullRequestTarget.prNumber, + }), + ), + ); + } catch (error) { + const cached = getCachedPullRequestCommentsState(cacheKey); + if (cached) { + console.warn( + "[GitHub] Failed to refresh pull request comments; using cached value:", + error, + ); + return cached.value; + } + + throw error; + } + }, + }); } catch { return []; } } +export async function fetchGitHubPreviewUrl({ + worktreePath, + githubStatus, + forceFresh = false, +}: { + worktreePath: string; + githubStatus?: GitHubStatus | null; + forceFresh?: boolean; +}): Promise { + const context = await resolveGitHubStatusContext(worktreePath); + if (!context) { + return null; + } + + const targetUrl = context.repoContext.isFork + ? 
context.repoContext.upstreamUrl + : context.repoContext.repoUrl; + const repoNameWithOwner = extractNwoFromUrl(targetUrl); + if (!repoNameWithOwner) { + return null; + } + + const cacheKey = makeGitHubPreviewCacheKey({ + worktreePath, + repoNameWithOwner, + branchName: context.previewBranchName, + headSha: context.headSha, + pullRequestNumber: githubStatus?.pr?.number, + }); + + if (isRateLimited()) { + const cached = getCachedGitHubPreviewUrl(cacheKey); + trackGitHubOperationEvent({ + name: "preview_refresh", + category: "sync", + worktreePath, + success: true, + durationMs: 0, + rateLimited: true, + }); + return cached; + } + + return trackGitHubOperation({ + name: "preview_refresh", + category: "sync", + worktreePath, + fn: async () => { + return readCachedGitHubPreviewUrl( + cacheKey, + () => + rateLimitedRefresh(() => + refreshGitHubPreviewUrl({ + worktreePath, + repoNameWithOwner, + branchName: context.previewBranchName, + headSha: context.headSha, + pullRequestNumber: githubStatus?.pr?.number, + }), + ), + { + forceFresh, + }, + ); + }, + }); +} + function isSafeHttpUrl(url: string): boolean { try { const parsed = new URL(url); @@ -311,11 +485,17 @@ async function queryDeploymentUrl( nwo: string, queryParams: string, ): Promise { - const { stdout } = await execWithShellEnv( - "gh", - ["api", `repos/${nwo}/deployments?${queryParams}&per_page=5`], - { cwd: worktreePath }, - ); + const { stdout } = await trackGitHubOperation({ + name: "gh_api_deployments", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + ["api", `repos/${nwo}/deployments?${queryParams}&per_page=5`], + { cwd: worktreePath }, + ), + }); const rawDeployments: unknown = JSON.parse(stdout.trim()); if (!Array.isArray(rawDeployments) || rawDeployments.length === 0) { @@ -336,11 +516,17 @@ async function queryDeploymentUrl( const urls = await Promise.all( deploymentIds.map(async (id): Promise => { try { - const { stdout: out } = await execWithShellEnv( - "gh", - ["api", 
`repos/${nwo}/deployments/${id}/statuses?per_page=1`], - { cwd: worktreePath }, - ); + const { stdout: out } = await trackGitHubOperation({ + name: "gh_api_deployment_status", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + ["api", `repos/${nwo}/deployments/${id}/statuses?per_page=1`], + { cwd: worktreePath }, + ), + }); const rawStatuses: unknown = JSON.parse(out.trim()); if (!Array.isArray(rawStatuses) || rawStatuses.length === 0) { return undefined; @@ -371,29 +557,26 @@ async function queryDeploymentUrl( * Fetches the preview deployment URL by trying multiple query strategies: * 1. By commit SHA (works for Vercel, Netlify official integrations) * 2. By branch name ref (works for some CI configurations) - * The PR merge ref (refs/pull/N/merge) is handled in fetchGitHubPRStatus - * after the PR number is known. + * 3. By PR merge ref when the PR number is already known */ -async function fetchPreviewDeploymentUrl( - worktreePath: string, - headSha: string | undefined, - branchName: string, - repoContext: RepoContext, -): Promise { +async function refreshGitHubPreviewUrl({ + worktreePath, + repoNameWithOwner, + headSha, + branchName, + pullRequestNumber, +}: { + worktreePath: string; + repoNameWithOwner: string; + headSha?: string; + branchName: string; + pullRequestNumber?: number; +}): Promise { try { - const targetUrl = repoContext.isFork - ? 
repoContext.upstreamUrl - : repoContext.repoUrl; - const nwo = extractNwoFromUrl(targetUrl); - if (!nwo) { - return undefined; - } - if (headSha) { - // Try by commit SHA (works for Vercel, Netlify official integrations) const bySha = await queryDeploymentUrl( worktreePath, - nwo, + repoNameWithOwner, `sha=${headSha}`, ); if (bySha) { @@ -401,13 +584,297 @@ async function fetchPreviewDeploymentUrl( } } - // Fall back to branch name (works for some CI configurations) - return await queryDeploymentUrl( + const byBranch = await queryDeploymentUrl( worktreePath, - nwo, + repoNameWithOwner, `ref=${encodeURIComponent(branchName)}`, ); + if (byBranch) { + return byBranch; + } + + if (!pullRequestNumber) { + return null; + } + + return ( + (await queryDeploymentUrl( + worktreePath, + repoNameWithOwner, + `ref=${encodeURIComponent(`refs/pull/${pullRequestNumber}/merge`)}`, + )) ?? null + ); } catch { - return undefined; + return null; + } +} + +export interface JobStepInfo { + name: string; + status: "queued" | "in_progress" | "completed"; + conclusion: string | null; + number: number; +} + +/** + * Extracts job ID from a GitHub Actions details URL. + * URL format: https://github.com/{owner}/{repo}/actions/runs/{run_id}/job/{job_id} + */ +function parseJobIdFromUrl(detailsUrl: string): string | null { + try { + const url = new URL(detailsUrl); + const match = url.pathname.match(/\/actions\/runs\/\d+\/job\/(\d+)/); + return match?.[1] ?? null; + } catch { + return null; + } +} + +/** + * Extracts nwo (owner/repo) from a GitHub Actions details URL. + */ +function parseNwoFromActionsUrl(detailsUrl: string): string | null { + try { + const url = new URL(detailsUrl); + const match = url.pathname.match(/^\/([^/]+\/[^/]+)\/actions\//); + return match?.[1] ?? null; + } catch { + return null; } } + +/** + * Fetches job steps for a given GitHub Actions check using its details URL. 
+ */ +export async function fetchCheckJobSteps( + worktreePath: string, + detailsUrl: string, +): Promise { + const jobId = parseJobIdFromUrl(detailsUrl); + const nwo = parseNwoFromActionsUrl(detailsUrl); + if (!jobId || !nwo) { + return []; + } + + try { + const { stdout } = await trackGitHubOperation({ + name: "gh_api_actions_job", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv("gh", ["api", `repos/${nwo}/actions/jobs/${jobId}`], { + cwd: worktreePath, + }), + }); + + const raw: unknown = JSON.parse(stdout.trim()); + const result = GHJobResponseSchema.safeParse(raw); + if (!result.success) { + return []; + } + + return (result.data.steps ?? []).map((step) => ({ + name: step.name, + status: step.status, + conclusion: step.conclusion ?? null, + number: step.number, + })); + } catch { + return []; + } +} + +export interface StructuredJobStep { + name: string; + number: number; + status: "queued" | "in_progress" | "completed"; + conclusion: string | null; + durationSeconds: number | null; + logs: string; +} + +export interface StructuredJobResult { + jobStatus: "queued" | "in_progress" | "completed" | "waiting"; + jobConclusion: string | null; + steps: StructuredJobStep[]; +} + +/** + * Fetches job step metadata and logs, returning structured per-step data. 
+ */ +export async function fetchStructuredJobLogs( + worktreePath: string, + detailsUrl: string, +): Promise { + const jobId = parseJobIdFromUrl(detailsUrl); + const nwo = parseNwoFromActionsUrl(detailsUrl); + const emptyResult: StructuredJobResult = { + jobStatus: "queued", + jobConclusion: null, + steps: [], + }; + if (!jobId || !nwo) { + return emptyResult; + } + + try { + // Always fetch job metadata; logs may 404 for in-progress jobs + const jobResult = await trackGitHubOperation({ + name: "gh_api_actions_job", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv("gh", ["api", `repos/${nwo}/actions/jobs/${jobId}`], { + cwd: worktreePath, + }), + }); + + const raw: unknown = JSON.parse(jobResult.stdout.trim()); + const result = GHJobResponseSchema.safeParse(raw); + if (!result.success || !result.data.steps) { + return emptyResult; + } + + const jobData = result.data; + const steps = jobData.steps ?? []; + const jobCompleted = jobData.status === "completed"; + + // Only fetch logs if job is completed (API returns 404 for in-progress) + let rawLogs = ""; + if (jobCompleted) { + try { + const logsResult = await trackGitHubOperation({ + name: "gh_api_actions_job_logs", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + ["api", `repos/${nwo}/actions/jobs/${jobId}/logs`], + { cwd: worktreePath, maxBuffer: 10 * 1024 * 1024 }, + ), + }); + rawLogs = logsResult.stdout; + } catch { + // Logs not yet available + } + } + + // Parse raw logs into per-step sections. + // GitHub log format: each line starts with a timestamp like "2024-01-01T00:00:00.0000000Z " + // Steps are separated by ##[group] / ##[endgroup] markers, but these aren't always reliable. + // Instead, match by step started_at/completed_at time ranges. + const logLines = rawLogs.split("\n"); + const stepLogs: Map = new Map(); + + // Build time ranges for each step + const stepRanges = steps.map((step) => ({ + number: step.number, + start: step.started_at ? 
new Date(step.started_at).getTime() : 0, + end: step.completed_at + ? new Date(step.completed_at).getTime() + : Number.POSITIVE_INFINITY, + })); + + for (const line of logLines) { + const tsMatch = line.match( + /^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s/, + ); + if (!tsMatch) continue; + const lineTime = new Date(tsMatch[1]).getTime(); + const lineContent = line.slice(tsMatch[0].length); + + // Find which step this line belongs to + for (const range of stepRanges) { + if (lineTime >= range.start && lineTime <= range.end + 1000) { + if (!stepLogs.has(range.number)) { + stepLogs.set(range.number, []); + } + stepLogs.get(range.number)?.push(lineContent); + break; + } + } + } + + return { + jobStatus: jobData.status, + jobConclusion: jobData.conclusion ?? null, + steps: steps.map((step) => { + let durationSeconds: number | null = null; + if (step.started_at && step.completed_at) { + durationSeconds = Math.round( + (new Date(step.completed_at).getTime() - + new Date(step.started_at).getTime()) / + 1000, + ); + } + return { + name: step.name, + number: step.number, + status: step.status, + conclusion: step.conclusion ?? null, + durationSeconds, + logs: stepLogs.get(step.number)?.join("\n") ?? "", + }; + }), + }; + } catch (err) { + console.error("[fetchStructuredJobLogs] Failed:", err); + return emptyResult; + } +} + +export interface JobStatusInfo { + detailsUrl: string; + status: "queued" | "in_progress" | "completed" | "waiting"; + conclusion: string | null; +} + +/** + * Fetches current status for multiple jobs in parallel. 
+ */ +export async function fetchJobStatuses( + worktreePath: string, + detailsUrls: string[], +): Promise { + const results = await Promise.allSettled( + detailsUrls.map(async (detailsUrl) => { + const jobId = parseJobIdFromUrl(detailsUrl); + const nwo = parseNwoFromActionsUrl(detailsUrl); + if (!jobId || !nwo) { + return { detailsUrl, status: "queued" as const, conclusion: null }; + } + const { stdout } = await trackGitHubOperation({ + name: "gh_api_actions_job_status", + category: "gh", + worktreePath, + fn: () => + execWithShellEnv( + "gh", + [ + "api", + `repos/${nwo}/actions/jobs/${jobId}`, + "--jq", + '.status + "|" + (.conclusion // "")', + ], + { cwd: worktreePath }, + ), + }); + const [status, conclusion] = stdout.trim().split("|"); + return { + detailsUrl, + status: (status || "queued") as JobStatusInfo["status"], + conclusion: conclusion || null, + }; + }), + ); + return results.map((r, i) => + r.status === "fulfilled" + ? r.value + : { + detailsUrl: detailsUrls[i], + status: "queued" as const, + conclusion: null, + }, + ); +} diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/index.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/index.ts index 37e7dca32a3..5d93d9df4da 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/index.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/index.ts @@ -1,14 +1,24 @@ export type { PullRequestCommentsTarget } from "./github"; export { + addPullRequestConversationComment, clearGitHubCachesForWorktree, + fetchCheckJobSteps, fetchGitHubPRComments, fetchGitHubPRStatus, + fetchGitHubPreviewUrl, + fetchJobStatuses, + fetchStructuredJobLogs, + replyToReviewThread, resolveReviewThread, } from "./github"; +export { isRateLimited } from "./github-rate-limiter"; +export { githubSyncService } from "./github-sync-service"; export { getPRForBranch } from "./pr-resolution"; export { extractNwoFromUrl, getPullRequestRepoArgs, + getPullRequestRepoNamesForWorktree, 
getRepoContext, + getTrackingRepoUrl, normalizeGitHubUrl, } from "./repo-context"; diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/pr-attachment.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/pr-attachment.ts new file mode 100644 index 00000000000..11da86fa025 --- /dev/null +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/pr-attachment.ts @@ -0,0 +1,152 @@ +import type { GitHubStatus } from "@superset/local-db"; +import { normalizeGitHubUrl } from "./repo-context"; + +type PullRequest = NonNullable; + +export interface GitRemoteInfo { + name: string; + fetchUrl?: string; + pushUrl?: string; +} + +export interface GitTrackingRefInfo { + remoteName: string; + branchName: string; +} + +export interface PullRequestPushTargetInfo { + remote: string; + targetBranch: string; +} + +export function isOpenPullRequestState(state: PullRequest["state"]): boolean { + return state === "open" || state === "draft"; +} + +export function getPullRequestHeadRepoUrl( + pr: Pick< + PullRequest, + "headRepositoryOwner" | "headRepositoryName" | "isCrossRepository" + >, +): string | null { + if ( + !pr.isCrossRepository || + !pr.headRepositoryOwner || + !pr.headRepositoryName + ) { + return null; + } + + return `https://github.com/${pr.headRepositoryOwner}/${pr.headRepositoryName}`; +} + +export function resolveRemoteNameForPullRequestHead({ + remotes, + pr, + fallbackRemote, +}: { + remotes: GitRemoteInfo[]; + pr: Pick< + PullRequest, + "headRepositoryOwner" | "headRepositoryName" | "isCrossRepository" + >; + fallbackRemote: string; +}): string | null { + if (!pr.isCrossRepository) { + return fallbackRemote; + } + + const headRepoUrl = getPullRequestHeadRepoUrl(pr); + if (!headRepoUrl) { + return null; + } + + const normalizedHeadRepoUrl = normalizeGitHubUrl(headRepoUrl); + if (!normalizedHeadRepoUrl) { + return null; + } + + for (const remote of remotes) { + const fetchUrl = remote.fetchUrl + ? 
normalizeGitHubUrl(remote.fetchUrl) + : null; + const pushUrl = remote.pushUrl ? normalizeGitHubUrl(remote.pushUrl) : null; + if ( + fetchUrl === normalizedHeadRepoUrl || + pushUrl === normalizedHeadRepoUrl + ) { + return remote.name; + } + } + + return null; +} + +export function resolveOpenPullRequestPushTarget({ + pr, + remotes, + fallbackRemote, +}: { + pr: Pick< + PullRequest, + | "headRefName" + | "headRepositoryOwner" + | "headRepositoryName" + | "isCrossRepository" + | "state" + >; + remotes: GitRemoteInfo[]; + fallbackRemote: string; +}): PullRequestPushTargetInfo | null { + if (!isOpenPullRequestState(pr.state)) { + return null; + } + + const targetBranch = pr.headRefName?.trim(); + if (!targetBranch) { + return null; + } + + const remote = resolveRemoteNameForPullRequestHead({ + remotes, + pr, + fallbackRemote, + }); + if (!remote) { + return null; + } + + return { + remote, + targetBranch, + }; +} + +export function canAttachPullRequestToWorkspace({ + pr, + remotes, + fallbackRemote, +}: { + pr: Pick< + PullRequest, + | "headRefName" + | "headRepositoryOwner" + | "headRepositoryName" + | "isCrossRepository" + | "state" + >; + remotes: GitRemoteInfo[]; + fallbackRemote: string; +}): boolean { + if (!isOpenPullRequestState(pr.state)) { + return true; + } + + return ( + resolveOpenPullRequestPushTarget({ + pr, + remotes, + fallbackRemote, + }) !== null + ); +} diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/repo-context.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/repo-context.ts index 6091754b7ff..9d345174615 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/repo-context.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/repo-context.ts @@ -1,5 +1,6 @@ import { execGitWithShellPath } from "../git-client"; import { execWithShellEnv } from "../shell-env"; +import { parseUpstreamRef } from "../upstream-ref"; import { getCachedRepoContextState, readCachedRepoContext } from 
"./cache"; import { GHRepoResponseSchema, type RepoContext } from "./types"; @@ -21,21 +22,23 @@ async function refreshRepoContext( } const data = result.data; - let context: RepoContext; + let context: RepoContext | undefined; if (data.isFork && data.parent) { - context = { - repoUrl: data.url, - upstreamUrl: data.parent.url, - isFork: true, - }; - } else { - const originUrl = await getOriginUrl(worktreePath); - const ghUrl = normalizeGitHubUrl(data.url); + const upstreamUrl = + data.parent.url ?? + (data.parent.owner?.login && data.parent.name + ? `https://github.com/${data.parent.owner.login}/${data.parent.name}` + : null); - if (data.isFork) { - return null; + if (upstreamUrl) { + context = { repoUrl: data.url, upstreamUrl, isFork: true }; } + } + + if (!context) { + const originUrl = await getOriginUrl(worktreePath); + const ghUrl = normalizeGitHubUrl(data.url); if (originUrl && ghUrl && originUrl !== ghUrl) { context = { @@ -43,6 +46,14 @@ async function refreshRepoContext( upstreamUrl: ghUrl, isFork: true, }; + } else if (data.isFork) { + // Fork but upstream URL could not be determined — surface as error + // rather than silently treating as non-fork (which would misdirect PRs) + console.warn( + "[GitHub] Fork detected but upstream URL could not be resolved", + { url: data.url }, + ); + return null; } else { context = { repoUrl: data.url, @@ -92,7 +103,7 @@ export function shouldRefreshCachedRepoContext({ cachedRepoContext: RepoContext | null; }): boolean { if (!cachedRepoContext) { - return false; + return true; } const normalizedOriginUrl = normalizeGitHubUrl( @@ -110,9 +121,20 @@ export function shouldRefreshCachedRepoContext({ } async function getOriginUrl(worktreePath: string): Promise { + try { + return getRemoteUrl(worktreePath, "origin"); + } catch { + return null; + } +} + +async function getRemoteUrl( + worktreePath: string, + remoteName: string, +): Promise { try { const { stdout } = await execGitWithShellPath( - ["remote", "get-url", "origin"], 
+ ["remote", "get-url", remoteName], { cwd: worktreePath }, ); return normalizeGitHubUrl(stdout.trim()); @@ -139,13 +161,86 @@ export function normalizeGitHubUrl(remoteUrl: string): string | null { export function extractNwoFromUrl(normalizedUrl: string): string | null { try { - const path = new URL(normalizedUrl).pathname.slice(1); - return path || null; + const segments = new URL(normalizedUrl).pathname.split("/").filter(Boolean); + if (segments.length < 2) { + return null; + } + return `${segments[0]}/${segments[1]}`; } catch { return null; } } +export function getPullRequestRepoNames( + repoContext?: Pick | null, +): string[] { + if (!repoContext) { + return []; + } + + const candidates = [ + repoContext.repoUrl, + repoContext.isFork ? repoContext.upstreamUrl : null, + ]; + + return Array.from( + new Set( + candidates + .map((candidate) => normalizeGitHubUrl(candidate ?? "")) + .filter((candidate): candidate is string => Boolean(candidate)) + .map((candidate) => extractNwoFromUrl(candidate)) + .filter((candidate): candidate is string => Boolean(candidate)), + ), + ); +} + +export async function getTrackingRepoUrl( + worktreePath: string, +): Promise { + try { + const { stdout } = await execGitWithShellPath( + ["rev-parse", "--abbrev-ref", "@{upstream}"], + { cwd: worktreePath }, + ); + const parsed = parseUpstreamRef(stdout.trim()); + if (!parsed) { + return null; + } + + return getRemoteUrl(worktreePath, parsed.remoteName); + } catch { + return null; + } +} + +export async function getPullRequestRepoNamesForWorktree({ + worktreePath, + repoContext, +}: { + worktreePath: string; + repoContext?: Pick | null; +}): Promise { + const [resolvedRepoContext, trackingRepoUrl] = await Promise.all([ + repoContext ? 
Promise.resolve(repoContext) : getRepoContext(worktreePath), + getTrackingRepoUrl(worktreePath), + ]); + + const candidates = [ + trackingRepoUrl, + ...getPullRequestRepoNames(resolvedRepoContext), + ]; + + return Array.from( + new Set( + candidates + .map((candidate) => normalizeGitHubUrl(candidate ?? "")) + .filter((candidate): candidate is string => Boolean(candidate)) + .map((candidate) => extractNwoFromUrl(candidate)) + .filter((candidate): candidate is string => Boolean(candidate)), + ), + ); +} + export function getPullRequestRepoArgs( repoContext?: Pick | null, ): string[] { diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/types.ts b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/types.ts index 7f379fcffc9..6283748778a 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/types.ts +++ b/apps/desktop/src/lib/trpc/routers/workspaces/utils/github/types.ts @@ -32,6 +32,15 @@ export const GHReviewRequestSchema = z.object({ type: z.enum(["User", "Team"]).optional(), }); +export const GHUserSchema = z.object({ + login: z.string().optional(), +}); + +export const GHIdentityCandidateUserSchema = z.object({ + login: z.string().optional(), + avatarUrl: z.string().optional(), +}); + export const GHCommentAuthorSchema = z.object({ login: z.string().optional(), avatar_url: z.string().optional(), @@ -78,6 +87,21 @@ export const GHPageInfoSchema = z.object({ endCursor: z.string().nullable(), }); +export const GHUsersConnectionSchema = z.object({ + nodes: z.array(GHIdentityCandidateUserSchema.nullable()).optional(), + pageInfo: GHPageInfoSchema, +}); + +export const GHIdentityCandidatesResponseSchema = z.object({ + data: z.object({ + repository: z + .object({ + users: GHUsersConnectionSchema, + }) + .nullable(), + }), +}); + export const GHReviewThreadCommentsConnectionSchema = z.object({ nodes: z.array(GHReviewThreadCommentSchema.nullable()).optional(), pageInfo: GHPageInfoSchema, @@ -155,12 +179,20 @@ export const 
GHPRResponseSchema = z.object({ statusCheckRollup: z.array(GHCheckContextSchema).nullable(), comments: z.array(GHCommentSchema).nullable().optional(), reviewRequests: z.array(GHReviewRequestSchema).nullable().optional(), + assignees: z.array(GHUserSchema).nullable().optional(), }); export const GHRepoResponseSchema = z.object({ url: z.string(), isFork: z.boolean().optional().default(false), - parent: z.object({ url: z.string() }).nullable().optional(), + parent: z + .object({ + url: z.string().optional(), + name: z.string().optional(), + owner: z.object({ login: z.string() }).optional(), + }) + .nullable() + .optional(), }); export interface RepoContext { @@ -171,6 +203,38 @@ export interface RepoContext { export type GHPRResponse = z.infer; +/** + * GitHub Actions job step schema + */ +export const GHJobStepSchema = z.object({ + name: z.string(), + status: z.enum(["queued", "in_progress", "completed"]), + conclusion: z + .enum(["success", "failure", "cancelled", "skipped", ""]) + .nullable() + .optional(), + number: z.number(), + started_at: z.string().nullable().optional(), + completed_at: z.string().nullable().optional(), +}); + +export type GHJobStep = z.infer; + +export const GHJobResponseSchema = z.object({ + id: z.number(), + name: z.string(), + status: z.enum(["queued", "in_progress", "completed", "waiting"]), + conclusion: z + .enum(["success", "failure", "cancelled", "skipped", "timed_out", ""]) + .nullable() + .optional(), + started_at: z.string().nullable().optional(), + completed_at: z.string().nullable().optional(), + steps: z.array(GHJobStepSchema).optional(), +}); + +export type GHJobResponse = z.infer; + export const GHDeploymentSchema = z.object({ id: z.number(), ref: z.string(), diff --git a/apps/desktop/src/lib/trpc/routers/workspaces/workspaces.ts b/apps/desktop/src/lib/trpc/routers/workspaces/workspaces.ts index 5c0cbdd2564..da7c849b953 100644 --- a/apps/desktop/src/lib/trpc/routers/workspaces/workspaces.ts +++ 
b/apps/desktop/src/lib/trpc/routers/workspaces/workspaces.ts @@ -1,4 +1,5 @@ -import { mergeRouters } from "../.."; +import { mergeRouters, router } from "../.."; +import { createGithubExtendedRouter } from "./github-extended"; import { createCreateProcedures } from "./procedures/create"; import { createDeleteProcedures } from "./procedures/delete"; import { createGenerateBranchNameProcedures } from "./procedures/generate-branch-name"; @@ -14,6 +15,7 @@ export const createWorkspacesRouter = () => { createDeleteProcedures(), createQueryProcedures(), createGitStatusProcedures(), + router({ githubExtended: createGithubExtendedRouter() }), createStatusProcedures(), createInitProcedures(), createSectionsProcedures(), diff --git a/apps/desktop/src/lib/window-loader.ts b/apps/desktop/src/lib/window-loader.ts index d31c07dde09..1b01eb63c66 100644 --- a/apps/desktop/src/lib/window-loader.ts +++ b/apps/desktop/src/lib/window-loader.ts @@ -2,7 +2,7 @@ import type { BrowserWindow } from "electron"; import { env } from "shared/env.shared"; /** Window IDs defined in the router configuration */ -type WindowId = "main" | "about"; +type WindowId = "main" | "about" | "tearoff"; /** * Load an Electron window with the appropriate URL for TanStack Router. diff --git a/apps/desktop/src/main/extension-host-worker/index.ts b/apps/desktop/src/main/extension-host-worker/index.ts new file mode 100644 index 00000000000..f69c1f2ed3b --- /dev/null +++ b/apps/desktop/src/main/extension-host-worker/index.ts @@ -0,0 +1,275 @@ +/** + * Extension Host Worker — Per-Workspace Subprocess Entry Point + * + * Spawned by ExtensionHostManager via child_process.spawn(). + * Each workspace gets its own instance of this process, providing + * full isolation of extension state, workspace path, and webview providers. 
+ * + * Run with: ELECTRON_RUN_AS_NODE=1 electron dist/main/extension-host-worker.js + */ + +import os from "node:os"; +import path from "node:path"; +import type { + MainToWorkerMessage, + WorkerToMainMessage, +} from "../lib/vscode-shim/ipc-types"; + +// Read config from environment +const workspacePath = process.env.EXTENSION_HOST_WORKSPACE_PATH ?? ""; +const workspaceId = process.env.EXTENSION_HOST_WORKSPACE_ID ?? ""; +const extensionsDir = + process.env.EXTENSION_HOST_EXTENSIONS_DIR ?? + path.join(os.homedir(), ".vscode", "extensions"); + +function send(msg: WorkerToMainMessage): void { + process.send?.(msg); +} + +async function main() { + console.log( + `[ext-host-worker:${workspaceId}] Starting with workspace: ${workspacePath}`, + ); + + // Import shim modules (each process gets its own copy) + const { setWorkspacePath } = await import("../lib/vscode-shim/api/workspace"); + const { + setActiveTextEditor, + onOpenFile, + onOpenDiff, + setSendToMain, + resolveDialogResult, + resolveOpenDialogResult, + } = await import("../lib/vscode-shim/api/window"); + setSendToMain(send); + const { commands } = await import("../lib/vscode-shim/api/commands"); + const { discoverExtensions, loadExtension, deactivateAll } = await import( + "../lib/vscode-shim/loader" + ); + const { getActivePanel, getActiveView, onWebviewEvent, resolveWebviewView } = + await import("../lib/vscode-shim/api/webview"); + const { registerExtensionDefaults } = await import( + "../lib/vscode-shim/api/configuration" + ); + + // Read enabled config + let enabledConfig: Record = {}; + try { + const enabledConfigPath = process.env.EXTENSION_HOST_ENABLED_CONFIG; + if (enabledConfigPath) { + const fs = await import("node:fs"); + if (fs.existsSync(enabledConfigPath)) { + enabledConfig = JSON.parse(fs.readFileSync(enabledConfigPath, "utf-8")); + } + } + } catch {} + + // Set workspace path + if (workspacePath) { + setWorkspacePath(workspacePath); + } + + // Set platform context + const platform = + 
process.platform === "darwin" + ? "darwin" + : process.platform === "win32" + ? "windows" + : "linux"; + commands.executeCommand("setContext", "os", platform); + + // Listen for webview events and relay to main process + onWebviewEvent((event) => { + send({ type: "webview-event", event }); + }); + + // Listen for file open requests + onOpenFile((data) => { + send({ type: "open-file", filePath: data.filePath, line: data.line }); + }); + + // Listen for diff open requests + onOpenDiff((data) => { + send({ + type: "open-diff", + leftUri: data.leftUri, + rightUri: data.rightUri, + title: data.title, + leftContent: data.leftContent, + }); + }); + + // Supported extension IDs + const SUPPORTED_EXTENSIONS = new Set( + ( + process.env.EXTENSION_HOST_SUPPORTED_IDS ?? + "anthropic.claude-code,openai.chatgpt,moonshot-ai.kimi-code" + ) + .split(",") + .map((s) => s.trim()), + ); + + // Discover and load extensions + const discovered = discoverExtensions(extensionsDir); + const toLoad = discovered.filter((ext) => SUPPORTED_EXTENSIONS.has(ext.id)); + + // Pick latest version for each extension + const byId = new Map(); + for (const ext of toLoad) { + const existing = byId.get(ext.id); + if (!existing || ext.manifest.version > existing.manifest.version) { + byId.set(ext.id, ext); + } + } + + for (const ext of byId.values()) { + if (enabledConfig[ext.id] === false) { + console.log( + `[ext-host-worker:${workspaceId}] Skipping disabled: ${ext.id}`, + ); + continue; + } + try { + registerExtensionDefaults(ext.manifest); + await loadExtension(ext); + console.log(`[ext-host-worker:${workspaceId}] Loaded: ${ext.id}`); + } catch (err) { + console.error( + `[ext-host-worker:${workspaceId}] Failed to load ${ext.id}:`, + err, + ); + } + } + + // Handle IPC messages from main process + process.on("message", async (msg: MainToWorkerMessage) => { + switch (msg.type) { + case "set-active-editor": + setActiveTextEditor(msg.filePath, msg.languageId); + break; + + case "set-workspace-path": + 
setWorkspacePath(msg.workspacePath); + break; + + case "resolve-webview": { + const result = resolveWebviewView(msg.viewType, msg.extensionPath); + if (result) { + const { viewId, view } = result; + // Get HTML (may be set synchronously or async) + let html = (view.webview as { html?: string }).html ?? null; + + // If HTML not yet set, wait up to 5s + if (!html) { + html = await new Promise((resolve) => { + let settled = false; + let interval: ReturnType | null = null; + let timeout: ReturnType | null = null; + + const finish = (value: string | null) => { + if (settled) return; + settled = true; + if (interval !== null) { + clearInterval(interval); + interval = null; + } + if (timeout !== null) { + clearTimeout(timeout); + timeout = null; + } + resolve(value); + }; + + const checkHtml = () => + (view.webview as { html?: string }).html ?? null; + const immediate = checkHtml(); + if (immediate) { + finish(immediate); + return; + } + interval = setInterval(() => { + const h = checkHtml(); + if (h) finish(h); + }, 200); + timeout = setTimeout(() => { + finish(checkHtml()); + }, 5000); + }); + } + + send({ + type: "resolve-webview-result", + requestId: msg.requestId, + viewId, + html, + }); + } else { + send({ + type: "resolve-webview-result", + requestId: msg.requestId, + viewId: null, + html: null, + }); + } + break; + } + + case "post-message": { + const target = getActiveView(msg.viewId) ?? 
getActivePanel(msg.viewId); + if (target) { + const webview = target.webview as { + _onDidReceiveMessage?: { fire(data: unknown): void }; + }; + webview._onDidReceiveMessage?.fire(msg.message); + } + break; + } + + case "shutdown": + await deactivateAll(); + process.exit(0); + break; + + case "dialog-result": + resolveDialogResult(msg.requestId, msg.selectedIndex); + break; + + case "open-dialog-result": + resolveOpenDialogResult(msg.requestId, msg.filePaths); + break; + } + }); + + // Signal ready + send({ type: "ready" }); + console.log(`[ext-host-worker:${workspaceId}] Ready`); +} + +main().catch((err) => { + console.error(`[ext-host-worker:${workspaceId}] Fatal error:`, err); + process.exit(1); +}); + +// Graceful shutdown +process.on("SIGTERM", async () => { + try { + const { deactivateAll } = await import("../lib/vscode-shim/loader"); + await deactivateAll(); + } catch {} + process.exit(0); +}); + +// Orphan check: exit if parent dies +const parentPid = process.ppid; +const parentCheck = setInterval(() => { + try { + process.kill(parentPid, 0); + } catch { + clearInterval(parentCheck); + console.log( + `[ext-host-worker:${workspaceId}] Parent exited, shutting down`, + ); + process.exit(0); + } +}, 5000); +parentCheck.unref(); diff --git a/apps/desktop/src/main/index.ts b/apps/desktop/src/main/index.ts index 3c0eabe04c6..6fa586e9c18 100644 --- a/apps/desktop/src/main/index.ts +++ b/apps/desktop/src/main/index.ts @@ -1,6 +1,7 @@ import path from "node:path"; import { pathToFileURL } from "node:url"; -import { settings } from "@superset/local-db"; +import { projects, settings, workspaces } from "@superset/local-db"; +import { desc, eq, isNull } from "drizzle-orm"; import { app, BrowserWindow, @@ -16,6 +17,7 @@ import { loadToken, parseAuthDeepLink, } from "lib/trpc/routers/auth/utils/auth-functions"; +import { fetchGitHubOwner } from "lib/trpc/routers/projects/utils/github"; import { applyShellEnvToProcess } from "lib/trpc/routers/workspaces/utils/shell-env"; 
import { env as mainEnv } from "main/env.main"; import { @@ -27,20 +29,41 @@ import { setupAgentHooks } from "./lib/agent-setup"; import { initAppState } from "./lib/app-state"; import { requestAppleEventsAccess } from "./lib/apple-events-permission"; import { setupAutoUpdater } from "./lib/auto-updater"; +import { initializeBrowserIdentityManager } from "./lib/browser/browser-identity-manager"; +import { browserSitePermissionManager } from "./lib/browser/browser-site-permission-manager"; +import { initializeBrowserWebviewCompat } from "./lib/browser/browser-webview-compat"; import { resolveDevWorkspaceName } from "./lib/dev-workspace-name"; import { setWorkspaceDockIcon } from "./lib/dock-icon"; import { loadWebviewBrowserExtension } from "./lib/extensions"; -import { getHostServiceCoordinator } from "./lib/host-service-coordinator"; -import { localDb } from "./lib/local-db"; +import { createExtensionIconProtocolHandler } from "./lib/extensions/extension-icon-protocol"; +import { loadInstalledExtensions } from "./lib/extensions/extension-manager"; +// FORK NOTE: upstream renamed host-service-manager → host-service-coordinator (#3250 relay) +// Aliased as getHostServiceManager to minimize diff with fork's quit lifecycle code +import { getHostServiceCoordinator as getHostServiceManager } from "./lib/host-service-coordinator"; +import { closeLocalDb, localDb } from "./lib/local-db"; import { requestLocalNetworkAccess } from "./lib/local-network-permission"; import { ensureProjectIconsDir, getProjectIconPath } from "./lib/project-icons"; +import { reportError } from "./lib/report-error"; import { initSentry } from "./lib/sentry"; +import { setupServiceStatusPolling } from "./lib/service-status"; +import { createTempAudioProtocolHandler } from "./lib/temp-audio-protocol"; import { prewarmTerminalRuntime, reconcileDaemonSessions, } from "./lib/terminal"; import { disposeTray, initTray } from "./lib/tray"; -import { MainWindow } from "./windows/main"; +import { 
windowManager } from "./lib/window-manager"; +import { createWorkspaceMediaProtocolHandler } from "./lib/workspace-media-protocol"; + +// Lazy import to avoid module resolution issues during Vite build +const loadVscodeShim = () => + import("./lib/vscode-shim") as Promise; + +import { + cleanupMainWindowResources, + initNotifications, + MainWindow, +} from "./windows/main"; console.log("[main] Local database ready:", !!localDb); const IS_DEV = process.env.NODE_ENV === "development"; @@ -68,6 +91,176 @@ if (process.defaultApp) { app.setAsDefaultProtocolClient(PROTOCOL_SCHEME); } +function normalizeRepoValue( + value: string, +): { owner: string | null; repo: string } | null { + const trimmed = value.trim(); + if (!trimmed) return null; + + let candidate = trimmed.replace(/\.git$/i, ""); + + if (/^https?:\/\//i.test(candidate)) { + try { + const url = new URL(candidate); + candidate = url.pathname.replace(/^\/+/, "").replace(/\/+$/, ""); + } catch { + return null; + } + } + + candidate = candidate.replace(/^github\.com[/:]/i, ""); + const parts = candidate + .split("/") + .map((part) => part.trim()) + .filter(Boolean); + + if (parts.length >= 2) { + return { + owner: parts[parts.length - 2].toLowerCase(), + repo: parts[parts.length - 1].toLowerCase(), + }; + } + + if (parts.length === 1) { + return { + owner: null, + repo: parts[0].toLowerCase(), + }; + } + + return null; +} + +function normalizeOptionalPositiveInt(value: string | null): string | null { + if (!value) return null; + const parsed = Number(value); + if (!Number.isInteger(parsed) || parsed <= 0) return null; + return String(parsed); +} + +async function resolveWorkspaceOpenRouteFromDeepLink( + url: URL, +): Promise { + const repoParam = url.searchParams.get("repo"); + const fileParam = url.searchParams.get("file"); + const branchParam = url.searchParams.get("branch")?.trim() || null; + const normalizedRepo = repoParam ? 
normalizeRepoValue(repoParam) : null; + + if (!normalizedRepo) { + return null; + } + + const candidates = localDb + .select({ + workspaceId: workspaces.id, + workspaceBranch: workspaces.branch, + lastOpenedAt: workspaces.lastOpenedAt, + projectGithubOwner: projects.githubOwner, + projectId: projects.id, + projectMainRepoPath: projects.mainRepoPath, + }) + .from(workspaces) + .innerJoin(projects, eq(workspaces.projectId, projects.id)) + .where(isNull(workspaces.deletingAt)) + .orderBy(desc(workspaces.lastOpenedAt)) + .all(); + const candidatesWithOwner = await Promise.all( + candidates.map(async (row) => { + if (row.projectGithubOwner) { + return row; + } + + const projectGithubOwner = await fetchGitHubOwner( + row.projectMainRepoPath, + ); + if (!projectGithubOwner) { + return row; + } + + localDb + .update(projects) + .set({ githubOwner: projectGithubOwner }) + .where(eq(projects.id, row.projectId)) + .run(); + + return { + ...row, + projectGithubOwner, + }; + }), + ); + const filteredCandidates = candidatesWithOwner.filter((row) => { + const repoName = path.basename(row.projectMainRepoPath).toLowerCase(); + if (repoName !== normalizedRepo.repo) { + return false; + } + + if (!normalizedRepo.owner) { + return true; + } + + return ( + (row.projectGithubOwner ?? "").toLowerCase() === normalizedRepo.owner + ); + }); + + if (filteredCandidates.length === 0) { + return null; + } + + const match = + (branchParam + ? filteredCandidates.find( + (candidate) => candidate.workspaceBranch === branchParam, + ) + : null) ?? 
filteredCandidates[0]; + + if (!match) { + return null; + } + + const params = new URLSearchParams(); + if (fileParam?.trim()) { + params.set("file", fileParam.trim()); + } + + const line = normalizeOptionalPositiveInt(url.searchParams.get("line")); + if (line) { + params.set("line", line); + } + + const column = normalizeOptionalPositiveInt(url.searchParams.get("column")); + if (column) { + params.set("column", column); + } + + const search = params.toString(); + return `/workspace/${match.workspaceId}${search ? `?${search}` : ""}`; +} + +async function getRendererPathFromDeepLink( + urlString: string, +): Promise { + let parsed: URL; + try { + parsed = new URL(urlString); + } catch { + return null; + } + + if (parsed.hostname === "open") { + return ( + (await resolveWorkspaceOpenRouteFromDeepLink(parsed)) ?? "/workspace" + ); + } + + const host = parsed.hostname ? `/${parsed.hostname}` : ""; + const routePath = parsed.pathname === "/" ? "" : parsed.pathname; + const search = parsed.search || ""; + const hash = parsed.hash || ""; + return `${host}${routePath}${search}${hash}` || "/"; +} + async function processDeepLink(url: string): Promise { console.log("[main] Processing deep link:", url); @@ -82,9 +275,12 @@ async function processDeepLink(url: string): Promise { return; } - // Non-auth deep links: extract path and navigate in renderer - // e.g. 
superset://tasks/my-slug -> /tasks/my-slug - const path = `/${url.split("://")[1]}`; + const path = await getRendererPathFromDeepLink(url); + if (!path) { + console.error("[main] Failed to resolve deep link route:", url); + return; + } + focusMainWindow(); const windows = BrowserWindow.getAllWindows(); @@ -153,19 +349,26 @@ app.on("open-url", async (event, url) => { } }); +export type QuitMode = "release" | "stop"; +let pendingQuitMode: QuitMode | null = null; let isQuitting = false; -let skipQuitConfirmation = false; -export function setSkipQuitConfirmation(): void { - skipQuitConfirmation = true; +/** Request the app to quit. + * - "release": keep services running (re-adoptable on next launch) + * - "stop": terminate all services before exit */ +export function requestQuit(mode: QuitMode): void { + pendingQuitMode = mode; + app.quit(); } -export function quitApp(): void { - setSkipQuitConfirmation(); - app.quit(); +/** Set quit mode without triggering quit. + * Use when another API (e.g. autoUpdater.quitAndInstall) triggers quit internally. */ +export function prepareQuit(mode: QuitMode): void { + pendingQuitMode = mode; } -/** Bypasses before-quit — services are left running for re-adoption on next launch. */ +/** Exit the process immediately, bypassing before-quit. + * Services are left running for adoption on next launch. */ export function exitImmediately(): void { app.exit(0); } @@ -182,8 +385,14 @@ function getConfirmOnQuitSetting(): boolean { app.on("before-quit", async (event) => { if (isQuitting) return; + // Consume the quit mode so it doesn't persist across aborted quits + const quitMode = pendingQuitMode; + pendingQuitMode = null; + + // FORK NOTE: macOS tray-stay-alive block removed to match upstream (#3205). + // cleanupMainWindowResources() moved to the exit path below. 
const isDev = process.env.NODE_ENV === "development"; - if (!skipQuitConfirmation && !isDev && getConfirmOnQuitSetting()) { + if (quitMode === null && !isDev && getConfirmOnQuitSetting()) { event.preventDefault(); try { @@ -205,23 +414,90 @@ app.on("before-quit", async (event) => { } isQuitting = true; + // FORK NOTE: cleanup window resources before exit to prevent port conflicts + cleanupMainWindowResources(); + // Fork-local: stop the todo-agent scheduler before closing local-db so an + // in-flight tick can't insert a session into a closed SQLite handle. try { - getHostServiceCoordinator().releaseAll(); - disposeTray(); + const { getTodoScheduler } = await import("./todo-agent/scheduler"); + getTodoScheduler().stop(); } catch (error) { - console.error("[main] Cleanup during quit failed:", error); + console.warn("[main] todo-agent scheduler stop skipped", error); } + // Disconnect from the todo-agent daemon but leave it running so + // `claude -p` child processes survive the app restart (issue #237). + try { + const { stopTodoAgentDaemonBridge } = await import( + "./todo-agent/daemon-bridge" + ); + stopTodoAgentDaemonBridge(); + } catch (error) { + console.warn("[main] todo-agent daemon bridge stop skipped", error); + } + try { + const mod = await loadVscodeShim(); + await mod.shutdownExtensionHost(); + } catch {} + closeLocalDb(); + const manager = getHostServiceManager(); + if (quitMode === "stop") { + manager.stopAll(); + } else { + manager.releaseAll(); + } + disposeTray(); + + // app.exit() bypasses beforeunload in renderer processes, so tearoff windows + // never return their tabs via the normal beforeunload path. Collect them here + // and merge into persisted tabsState before the process exits. 
+ try { + const { appState } = await import("./lib/app-state"); + const tearoffTabs = await windowManager.collectAllTearoffTabs(1500); + if (tearoffTabs.length > 0) { + const current = appState.data.tabsState; + const existingIds = new Set(current.tabs.map((t) => t.id)); + const newEntries = tearoffTabs.filter( + ({ tab }) => !existingIds.has((tab as { id: string }).id), + ); + if (newEntries.length > 0) { + const newPanes: Record = {}; + for (const { panes } of newEntries) { + Object.assign(newPanes, panes); + } + appState.data.tabsState = { + ...current, + tabs: [ + ...current.tabs, + ...newEntries.map(({ tab }) => tab as (typeof current.tabs)[0]), + ], + panes: { ...current.panes, ...newPanes } as typeof current.panes, + }; + await appState.write(); + } + } + } catch (error) { + console.error("[main] Failed to collect tearoff tabs before quit:", error); + } + app.exit(0); }); process.on("uncaughtException", (error) => { if (isQuitting) return; console.error("[main] Uncaught exception:", error); + reportError(error, { + severity: "fatal", + tags: { subsystem: "main", handler: "uncaughtException" }, + }); }); process.on("unhandledRejection", (reason) => { if (isQuitting) return; console.error("[main] Unhandled rejection:", reason); + reportError(reason, { + severity: "error", + tags: { subsystem: "main", handler: "unhandledRejection" }, + }); }); // Without these handlers, Electron may not quit when electron-vite sends SIGTERM @@ -274,6 +550,54 @@ protocol.registerSchemesAsPrivileged([ supportFetchAPI: true, }, }, + { + scheme: "superset-ext-icon", + privileges: { + standard: true, + secure: true, + bypassCSP: true, + supportFetchAPI: true, + }, + }, + { + scheme: "superset-temp-audio", + privileges: { + standard: true, + secure: true, + bypassCSP: true, + supportFetchAPI: true, + stream: true, + }, + }, + { + scheme: "superset-workspace-media", + privileges: { + standard: true, + secure: true, + bypassCSP: true, + supportFetchAPI: true, + stream: true, + }, + }, 
+ { + scheme: "vscode-webview-resource", + privileges: { + standard: true, + secure: true, + bypassCSP: true, + supportFetchAPI: true, + stream: true, + }, + }, + { + scheme: "vscode-webview", + privileges: { + standard: true, + secure: true, + bypassCSP: true, + supportFetchAPI: true, + }, + }, ]); const gotTheLock = app.requestSingleInstanceLock(); @@ -295,6 +619,86 @@ if (!gotTheLock) { registerWithMacOSNotificationCenter(); requestAppleEventsAccess(); requestLocalNetworkAccess(); + initializeBrowserIdentityManager(); + initializeBrowserWebviewCompat(); + browserSitePermissionManager.initialize(); + try { + const { startBrowserMcpBridge } = await import( + "./lib/browser-mcp-bridge/server" + ); + await startBrowserMcpBridge(); + } catch (error) { + console.warn("[main] browser-mcp-bridge startup skipped", error); + } + // One-shot sweep of 30-day-old pasted attachments so userData + // doesn't grow forever from screenshots dropped into TODOs. + try { + const { cleanupOldAttachments } = await import( + "./todo-agent/attachments-cleanup" + ); + cleanupOldAttachments(); + } catch (error) { + console.warn("[main] todo-agent attachment cleanup skipped", error); + } + + // Fork-local: prune terminal TODO sessions older than the + // user-configured retention (0 = off). Runs after the attachment + // sweep so deleted sessions' images also drop out of the + // attachment reference set on the next run. + try { + const { cleanupOldSessions } = await import( + "./todo-agent/sessions-cleanup" + ); + cleanupOldSessions(); + } catch (error) { + console.warn("[main] todo-agent session cleanup skipped", error); + } + + // Fork-local: connect to the todo-agent daemon (spawning it if + // necessary). The daemon owns `claude -p` child processes so + // running TODO sessions survive app restarts — issue #237. 
+ try { + const { startTodoAgentDaemonBridge } = await import( + "./todo-agent/daemon-bridge" + ); + await startTodoAgentDaemonBridge(); + } catch (error) { + console.warn("[main] todo-agent daemon bridge failed", error); + } + + // Fork-local: start the todo-agent schedule scheduler so cron-like + // recurring TODOs fire while the app is running. Scheduler is a + // noop until a user creates at least one schedule. + try { + const { getTodoScheduler } = await import("./todo-agent/scheduler"); + getTodoScheduler().start(); + } catch (error) { + console.warn("[main] todo-agent scheduler start skipped", error); + // Surface the failure via the existing schedule-fire event + // bus so ScheduleFireToasts shows a one-off toast. Without + // this the feature dies silently and the user keeps waiting + // for fires that will never come. + try { + const { getTodoScheduleStore } = await import( + "./todo-agent/schedule-store" + ); + getTodoScheduleStore().emitFire({ + scheduleId: "__scheduler_init__", + scheduleName: "スケジューラ", + kind: "failed", + sessionId: null, + message: + error instanceof Error + ? `起動に失敗しました: ${error.message}` + : "起動に失敗しました", + firedAt: Date.now(), + }); + } catch { + // If schedule-store itself failed to load there's + // nothing we can surface — console.warn above is our + // last resort. 
+ } + } // Must register on both default session and the app's custom partition const iconProtocolHandler = (request: Request) => { @@ -341,12 +745,34 @@ if (!gotTheLock) { .protocol.handle("superset-font", fontProtocolHandler); } + // Serve extension icons via custom protocol + const extIconHandler = createExtensionIconProtocolHandler(); + protocol.handle("superset-ext-icon", extIconHandler); + session + .fromPartition("persist:superset") + .protocol.handle("superset-ext-icon", extIconHandler); + + // Serve temp audio files (for YouTube import waveform editor) + const tempAudioHandler = createTempAudioProtocolHandler(); + protocol.handle("superset-temp-audio", tempAudioHandler); + session + .fromPartition("persist:superset") + .protocol.handle("superset-temp-audio", tempAudioHandler); + + // Serve workspace audio/video files for the file viewer + const workspaceMediaHandler = createWorkspaceMediaProtocolHandler(); + protocol.handle("superset-workspace-media", workspaceMediaHandler); + session + .fromPartition("persist:superset") + .protocol.handle("superset-workspace-media", workspaceMediaHandler); + ensureProjectIconsDir(); setWorkspaceDockIcon(); initSentry(); await initAppState(); await loadWebviewBrowserExtension(); + await loadInstalledExtensions(); // Must happen before renderer restore runs await reconcileDaemonSessions(); @@ -360,20 +786,33 @@ if (!gotTheLock) { // Discover and adopt host-services that survived a previous quit // before the tray initializes, so it shows accurate status immediately. 
- await getHostServiceCoordinator().discoverAll(); + await getHostServiceManager().discoverAll(); if (IS_DEV) { - getHostServiceCoordinator().enableDevReload(async () => { + getHostServiceManager().enableDevReload(async () => { const { token } = await loadToken(); if (!token) return null; return { authToken: token, cloudApiUrl: mainEnv.NEXT_PUBLIC_API_URL }; }); } + initNotifications(); await makeAppSetup(() => MainWindow()); setupAutoUpdater(); + setupServiceStatusPolling(); initTray(); + // Initialize VS Code extension host (registers protocols, starts webview server) + // Each workspace spawns its own worker process via ExtensionHostManager. + loadVscodeShim() + .then((mod) => mod.initExtensionHost()) + .catch((err) => { + console.error( + "[main] Failed to initialize VS Code extension host:", + err, + ); + }); + const coldStartUrl = findDeepLinkInArgv(process.argv); if (coldStartUrl) { await processDeepLink(coldStartUrl); diff --git a/apps/desktop/src/main/lib/agent-command-execution-coordinator.test.ts b/apps/desktop/src/main/lib/agent-command-execution-coordinator.test.ts new file mode 100644 index 00000000000..3aaedf42dba --- /dev/null +++ b/apps/desktop/src/main/lib/agent-command-execution-coordinator.test.ts @@ -0,0 +1,47 @@ +import { describe, expect, it } from "bun:test"; +import { AgentCommandExecutionCoordinator } from "./agent-command-execution-coordinator"; + +describe("AgentCommandExecutionCoordinator", () => { + it("grants the first claim and rejects duplicates until release", () => { + const coordinator = new AgentCommandExecutionCoordinator(50); + + expect(coordinator.claim("cmd-1")).toBe(true); + expect(coordinator.claim("cmd-1")).toBe(false); + + coordinator.release("cmd-1"); + + expect(coordinator.claim("cmd-1")).toBe(true); + }); + + it("allows reclaim after the lease expires", async () => { + const coordinator = new AgentCommandExecutionCoordinator(20); + const expiredSoon = new Date(Date.now() + 20); + + expect(coordinator.claim("cmd-2", 
expiredSoon)).toBe(true); + expect(coordinator.claim("cmd-2", expiredSoon)).toBe(false); + + await Bun.sleep(30); + + expect(coordinator.claim("cmd-2", expiredSoon)).toBe(true); + }); + + it("treats invalid timeout values as a fallback lease", () => { + const coordinator = new AgentCommandExecutionCoordinator(50); + + expect(coordinator.claim("cmd-3", "invalid")).toBe(true); + expect(coordinator.isClaimed("cmd-3")).toBe(true); + }); + + it("uses a short grace lease when timeoutAt is already expired", async () => { + const coordinator = new AgentCommandExecutionCoordinator(50, 20); + const alreadyExpired = new Date(Date.now() - 1_000); + + expect(coordinator.claim("cmd-4", alreadyExpired)).toBe(true); + expect(coordinator.claim("cmd-4", alreadyExpired)).toBe(false); + expect(coordinator.isClaimed("cmd-4")).toBe(true); + + await Bun.sleep(30); + + expect(coordinator.claim("cmd-4", alreadyExpired)).toBe(true); + }); +}); diff --git a/apps/desktop/src/main/lib/agent-command-execution-coordinator.ts b/apps/desktop/src/main/lib/agent-command-execution-coordinator.ts new file mode 100644 index 00000000000..30c1d2ef7dd --- /dev/null +++ b/apps/desktop/src/main/lib/agent-command-execution-coordinator.ts @@ -0,0 +1,74 @@ +interface ClaimEntry { + expiresAt: number; +} + +const DEFAULT_CLAIM_TTL_MS = 5 * 60 * 1000; +const DEFAULT_EXPIRED_TIMEOUT_GRACE_MS = 5_000; + +export class AgentCommandExecutionCoordinator { + private readonly claims = new Map(); + + constructor( + private readonly defaultClaimTtlMs = DEFAULT_CLAIM_TTL_MS, + private readonly expiredTimeoutGraceMs = DEFAULT_EXPIRED_TIMEOUT_GRACE_MS, + ) {} + + claim(commandId: string, timeoutAt?: Date | string | null): boolean { + this.pruneExpiredClaims(); + + const existing = this.claims.get(commandId); + if (existing && existing.expiresAt > Date.now()) { + return false; + } + + this.claims.set(commandId, { + expiresAt: this.resolveExpiry(timeoutAt), + }); + return true; + } + + release(commandId: string): void { + 
this.claims.delete(commandId); + } + + isClaimed(commandId: string): boolean { + this.pruneExpiredClaims(); + const entry = this.claims.get(commandId); + return !!entry && entry.expiresAt > Date.now(); + } + + private pruneExpiredClaims(): void { + const now = Date.now(); + for (const [commandId, entry] of this.claims.entries()) { + if (entry.expiresAt <= now) { + this.claims.delete(commandId); + } + } + } + + private resolveExpiry(timeoutAt?: Date | string | null): number { + const now = Date.now(); + const parsed = + timeoutAt instanceof Date + ? timeoutAt.getTime() + : typeof timeoutAt === "string" + ? Date.parse(timeoutAt) + : Number.NaN; + if (Number.isFinite(parsed)) { + if (parsed > now) { + return parsed; + } + return now + this.expiredTimeoutGraceMs; + } + return now + this.defaultClaimTtlMs; + } +} + +let coordinator: AgentCommandExecutionCoordinator | null = null; + +export function getAgentCommandExecutionCoordinator(): AgentCommandExecutionCoordinator { + if (!coordinator) { + coordinator = new AgentCommandExecutionCoordinator(); + } + return coordinator; +} diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-claude-codex-opencode.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-claude-codex-opencode.ts index db889900cd5..06ccefe25cb 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-claude-codex-opencode.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-claude-codex-opencode.ts @@ -275,7 +275,8 @@ export function createClaudeWrapper(): void { } /** - * Creates the Codex wrapper that injects Superset's notify/session-log logic. + * Creates the Codex wrapper that enables native hooks and keeps the + * session-log watcher for prompt/permission events inside Superset terminals. 
*/ export function createCodexWrapper(): void { const notifyPath = getNotifyScriptPath(); @@ -423,14 +424,15 @@ export function getCodexGlobalHooksJsonContent( /** * Writes Superset hook definitions directly into ~/.codex/hooks.json. - * This provides a fallback notification path that works even when the - * binary wrapper is not in PATH (e.g. user runs codex from outside + * This is the primary lifecycle notification path for Codex and also works + * when the binary wrapper is not in PATH (e.g. user runs codex from outside * a Superset terminal). * - * The wrapper still injects Codex's native notify callback and keeps the - * session-log watcher as a best-effort bridge for older releases, but the - * native hooks.json registration is now the primary source for prompt/tool - * lifecycle events. + * The wrapper only enables Codex hooks and keeps the session-log watcher as a + * best-effort bridge for prompt/permission events inside Superset terminals. + * Completion notifications are handled exclusively via hooks.json to avoid + * the duplicate `/hook/complete` POSTs that occurred when the wrapper also + * injected `--notify=[...]`. */ export function createCodexHooksJson(): void { const notifyScriptPath = getNotifyScriptPath(); diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-common.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-common.ts index deff26ad578..81026819804 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-common.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-common.ts @@ -6,11 +6,7 @@ import { BIN_DIR } from "./paths"; export const WRAPPER_MARKER = "# Superset agent-wrapper v1"; export { SUPERSET_MANAGED_BINARIES }; -// Dev setup (.superset/lib/setup/steps.sh) points SUPERSET_HOME_DIR at -// $PWD/superset-dev-data — without a leading dot — so we must recognize that -// variant to reap stale notify.sh paths from deleted worktrees. 
-const SUPERSET_MANAGED_HOOK_PATH_PATTERN = - /\/(?:\.superset(?:-[^/'"\s\\]+)?|superset-dev-data)\//; +const SUPERSET_MANAGED_HOOK_PATH_PATTERN = /\/\.superset(?:-[^/'"\s\\]+)?\//; export function writeFileIfChanged( filePath: string, @@ -124,10 +120,75 @@ if [ -z "$REAL_BIN" ]; then exit 127 fi +export SUPERSET_WRAPPER_PID="$$" + ${execLine} `; } +export function getSleepInhibitorShellSnippet(): string { + return `_superset_manage_sleep_inhibitor() { + [ -n "$SUPERSET_WRAPPER_PID" ] || return 0 + [ "$SUPERSET_PREVENT_AGENT_SLEEP" = "1" ] || return 0 + + _superset_platform="$(uname -s 2>/dev/null)" + case "$_superset_platform" in + Darwin) + command -v caffeinate >/dev/null 2>&1 || return 0 + ;; + Linux) + command -v systemd-inhibit >/dev/null 2>&1 || return 0 + ;; + *) + return 0 + ;; + esac + + _superset_sleep_dir="\${TMPDIR:-/tmp}/superset-sleep-inhibitors" + mkdir -p "$_superset_sleep_dir" >/dev/null 2>&1 || return 0 + _superset_pid_file="$_superset_sleep_dir/\${SUPERSET_WRAPPER_PID}.pid" + + case "$EVENT_TYPE" in + Start|PermissionRequest) + if [ -f "$_superset_pid_file" ]; then + _superset_inhibitor_pid=$(cat "$_superset_pid_file" 2>/dev/null) + if [ -n "$_superset_inhibitor_pid" ] && kill -0 "$_superset_inhibitor_pid" 2>/dev/null; then + return 0 + fi + rm -f "$_superset_pid_file" >/dev/null 2>&1 || true + fi + + kill -0 "$SUPERSET_WRAPPER_PID" 2>/dev/null || return 0 + + case "$_superset_platform" in + Darwin) + caffeinate -i -w "$SUPERSET_WRAPPER_PID" >/dev/null 2>&1 & + ;; + Linux) + systemd-inhibit --what=idle:sleep --who="Superset" --why="Agent task in progress" \\ + /bin/sh -c 'wrapper_pid="$1"; while kill -0 "$wrapper_pid" 2>/dev/null; do sleep 15; done' \\ + _ "$SUPERSET_WRAPPER_PID" >/dev/null 2>&1 & + ;; + esac + + echo "$!" 
> "$_superset_pid_file" + ;; + Stop) + if [ -f "$_superset_pid_file" ]; then + _superset_inhibitor_pid=$(cat "$_superset_pid_file" 2>/dev/null) + if [ -n "$_superset_inhibitor_pid" ] && kill -0 "$_superset_inhibitor_pid" 2>/dev/null; then + kill "$_superset_inhibitor_pid" >/dev/null 2>&1 || true + fi + rm -f "$_superset_pid_file" >/dev/null 2>&1 || true + fi + ;; + esac +} + +_superset_manage_sleep_inhibitor +`; +} + export function createWrapper(binaryName: string, script: string): void { const changed = writeFileIfChanged(getWrapperPath(binaryName), script, 0o755); console.log( diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-copilot.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-copilot.ts index 4aba1cec9ac..fa2d92237be 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-copilot.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-copilot.ts @@ -4,6 +4,7 @@ import { env } from "shared/env.shared"; import { buildWrapperScript, createWrapper, + getSleepInhibitorShellSnippet, writeFileIfChanged, } from "./agent-wrappers-common"; import { HOOKS_DIR } from "./paths"; @@ -28,6 +29,7 @@ export function getCopilotHookScriptContent(): string { const template = fs.readFileSync(COPILOT_HOOK_TEMPLATE_PATH, "utf-8"); return template .replace("{{MARKER}}", COPILOT_HOOK_MARKER) + .replace("{{SLEEP_INHIBITOR_SNIPPET}}", getSleepInhibitorShellSnippet()) .replace(/\{\{DEFAULT_PORT\}\}/g, String(env.DESKTOP_NOTIFICATIONS_PORT)); } diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-cursor.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-cursor.ts index f5b8580bd56..2bbc25af7a4 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-cursor.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-cursor.ts @@ -5,6 +5,7 @@ import { env } from "shared/env.shared"; import { buildWrapperScript, createWrapper, + getSleepInhibitorShellSnippet, isSupersetManagedHookCommand, reconcileManagedEntries, 
writeFileIfChanged, @@ -46,6 +47,7 @@ export function getCursorHookScriptContent(): string { const template = fs.readFileSync(CURSOR_HOOK_TEMPLATE_PATH, "utf-8"); return template .replace("{{MARKER}}", CURSOR_HOOK_MARKER) + .replace("{{SLEEP_INHIBITOR_SNIPPET}}", getSleepInhibitorShellSnippet()) .replace(/\{\{DEFAULT_PORT\}\}/g, String(env.DESKTOP_NOTIFICATIONS_PORT)); } diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-gemini.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-gemini.ts index 8e3b7efa282..123ba266e75 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers-gemini.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers-gemini.ts @@ -5,6 +5,7 @@ import { env } from "shared/env.shared"; import { buildWrapperScript, createWrapper, + getSleepInhibitorShellSnippet, isSupersetManagedHookCommand, reconcileManagedEntries, writeFileIfChanged, @@ -52,6 +53,7 @@ export function getGeminiHookScriptContent(): string { const template = fs.readFileSync(GEMINI_HOOK_TEMPLATE_PATH, "utf-8"); return template .replace("{{MARKER}}", GEMINI_HOOK_MARKER) + .replace("{{SLEEP_INHIBITOR_SNIPPET}}", getSleepInhibitorShellSnippet()) .replace(/\{\{DEFAULT_PORT\}\}/g, String(env.DESKTOP_NOTIFICATIONS_PORT)); } diff --git a/apps/desktop/src/main/lib/agent-setup/agent-wrappers.test.ts b/apps/desktop/src/main/lib/agent-setup/agent-wrappers.test.ts index 0d97253eb97..338c6e196f8 100644 --- a/apps/desktop/src/main/lib/agent-setup/agent-wrappers.test.ts +++ b/apps/desktop/src/main/lib/agent-setup/agent-wrappers.test.ts @@ -172,7 +172,7 @@ describe("agent-wrappers copilot", () => { expect(updated).not.toContain("/tmp/old-hook.sh"); }); - it("injects codex start + permission watchers and completion notifications in wrapper", () => { + it("injects codex start + permission watchers and enables native hooks", () => { createCodexWrapper(); const wrapperPath = path.join(TEST_BIN_DIR, "codex"); @@ -197,9 +197,7 @@ describe("agent-wrappers copilot", () => 
{ expect(wrapper).toContain('awk -F\'"approval_id":"\''); expect(wrapper).toContain('_superset_emit_event "Start"'); expect(wrapper).toContain('_superset_emit_event "PermissionRequest"'); - expect(wrapper).toContain( - `"$REAL_BIN" --enable codex_hooks -c 'notify=["bash","${path.join(TEST_HOOKS_DIR, "notify.sh")}"]' "$@"`, - ); + expect(wrapper).toContain(`"$REAL_BIN" --enable codex_hooks "$@"`); expect(wrapper).toContain("SUPERSET_CODEX_START_WATCHER_PID"); expect(wrapper).toContain('kill "$SUPERSET_CODEX_START_WATCHER_PID"'); @@ -239,14 +237,9 @@ exit 0 }); expect(readFileSync(argsFile, "utf-8")).toBe( - `${[ - "--enable", - "codex_hooks", - "-c", - `notify=["bash","${path.join(TEST_HOOKS_DIR, "notify.sh")}"]`, - "exec", - "Reply with exactly OK.", - ].join("\n")}\n`, + `${["--enable", "codex_hooks", "exec", "Reply with exactly OK."].join( + "\n", + )}\n`, ); }); @@ -1193,71 +1186,6 @@ describe("agent-wrappers codex hooks.json", () => { ).toBe(true); }); - it("reaps stale notify.sh paths from in-repo dev worktrees", () => { - const codexHooksPath = path.join(mockedHomeDir, ".codex", "hooks.json"); - // Real-world layout: a dev worktree lives under /.worktrees/ - // and its dev setup writes SUPERSET_HOME_DIR=/superset-dev-data. - // There is no /.superset/ segment anywhere in the path. 
- const staleHookPath = - "/Users/test/code/superset/.worktrees/old-branch/superset-dev-data/hooks/notify.sh"; - const currentHookPath = "/tmp/.superset-new/hooks/notify.sh"; - - mkdirSync(path.dirname(codexHooksPath), { recursive: true }); - writeFileSync( - codexHooksPath, - JSON.stringify( - { - hooks: { - SessionStart: [ - { hooks: [{ type: "command", command: staleHookPath }] }, - ], - UserPromptSubmit: [ - { hooks: [{ type: "command", command: staleHookPath }] }, - ], - Stop: [ - { hooks: [{ type: "command", command: staleHookPath }] }, - ], - }, - }, - null, - 2, - ), - ); - - const content = getCodexGlobalHooksJsonContent(currentHookPath); - expect(content).not.toBeNull(); - if (content === null) throw new Error("Expected content"); - - const parsed = JSON.parse(content) as { - hooks: Record< - string, - Array<{ - matcher?: string; - hooks: Array<{ type: string; command: string }>; - }> - >; - }; - - for (const eventName of [ - "SessionStart", - "UserPromptSubmit", - "Stop", - ] as const) { - const hooks = parsed.hooks[eventName]; - expect(Array.isArray(hooks)).toBe(true); - expect( - hooks.some((def) => - def.hooks.some((hook) => hook.command === currentHookPath), - ), - ).toBe(true); - expect( - hooks.some((def) => - def.hooks.some((hook) => hook.command === staleHookPath), - ), - ).toBe(false); - } - }); - it("skips Codex hooks writes when existing JSON is invalid", () => { const codexHooksPath = path.join(mockedHomeDir, ".codex", "hooks.json"); const invalidJson = "{not-json"; diff --git a/apps/desktop/src/main/lib/agent-setup/notify-hook.ts b/apps/desktop/src/main/lib/agent-setup/notify-hook.ts index 19968b1385d..056833fe5bb 100644 --- a/apps/desktop/src/main/lib/agent-setup/notify-hook.ts +++ b/apps/desktop/src/main/lib/agent-setup/notify-hook.ts @@ -1,6 +1,7 @@ import fs from "node:fs"; import path from "node:path"; import { env } from "shared/env.shared"; +import { getSleepInhibitorShellSnippet } from "./agent-wrappers-common"; import { HOOKS_DIR } 
from "./paths"; export const NOTIFY_SCRIPT_NAME = "notify.sh"; @@ -41,6 +42,7 @@ export function getNotifyScriptContent(): string { const template = fs.readFileSync(NOTIFY_SCRIPT_TEMPLATE_PATH, "utf-8"); return template .replaceAll("{{MARKER}}", NOTIFY_SCRIPT_MARKER) + .replace("{{SLEEP_INHIBITOR_SNIPPET}}", getSleepInhibitorShellSnippet()) .replaceAll("{{DEFAULT_PORT}}", String(env.DESKTOP_NOTIFICATIONS_PORT)); } diff --git a/apps/desktop/src/main/lib/agent-setup/shell-wrappers.test.ts b/apps/desktop/src/main/lib/agent-setup/shell-wrappers.test.ts index 88cd78a3951..b6164ea1069 100644 --- a/apps/desktop/src/main/lib/agent-setup/shell-wrappers.test.ts +++ b/apps/desktop/src/main/lib/agent-setup/shell-wrappers.test.ts @@ -839,9 +839,6 @@ export SUPERSET_WORKSPACE_PATH="/wrong/path" expect(args[0]).toBe("-l"); expect(args[1]).toBe("--init-command"); expect(args[2]).toContain(`set -l _superset_bin "${TEST_BIN_DIR}"`); - // Both markers are emitted so old v1 daemons (777 scanner) and new - // scanners (133;A) both detect readiness without a daemon restart. 
- expect(args[2]).toContain("\\033]777;superset-shell-ready\\007"); expect(args[2]).toContain("\\033]133;A\\007"); }); @@ -857,21 +854,7 @@ export SUPERSET_WORKSPACE_PATH="/wrong/path" expect(args[2]).toContain( 'set -l _superset_bin "/tmp/with space/quote\\"buck\\$slash\\\\bin"', ); - expect(args[2]).toContain("777;superset-shell-ready"); expect(args[2]).toContain("133;A"); }); - - it("zsh/bash wrappers emit both legacy 777 and current 133;A markers", () => { - createZshWrapper(TEST_PATHS); - createBashWrapper(TEST_PATHS); - - const zlogin = readFileSync(path.join(TEST_ZSH_DIR, ".zlogin"), "utf-8"); - const rcfile = readFileSync(path.join(TEST_BASH_DIR, "rcfile"), "utf-8"); - - for (const wrapper of [zlogin, rcfile]) { - expect(wrapper).toContain("\\033]777;superset-shell-ready\\007"); - expect(wrapper).toContain("\\033]133;A\\007"); - } - }); }); }); diff --git a/apps/desktop/src/main/lib/agent-setup/shell-wrappers.ts b/apps/desktop/src/main/lib/agent-setup/shell-wrappers.ts index 3c8ae59f329..4489f574861 100644 --- a/apps/desktop/src/main/lib/agent-setup/shell-wrappers.ts +++ b/apps/desktop/src/main/lib/agent-setup/shell-wrappers.ts @@ -218,15 +218,10 @@ ${SUPERSET_ENV_RESTORE} ${buildZshPrecmdHook(paths.BIN_DIR)} ${buildPathPrependFunction(paths.BIN_DIR)} rehash 2>/dev/null || true -# Shell readiness markers. Emitting both keeps us compatible across daemon -# versions: the legacy v1 daemon scans for OSC 777, the current scanner (v1 -# post-refactor + v2 host-service) scans for OSC 133;A (FinalTerm standard). -# Wrappers are rewritten on every app launch, so main always ships the -# superset of markers; daemons that only get restarted on protocol bumps -# still match against their own scanner. +# OSC 133;A prompt marker (FinalTerm standard) — signals shell readiness. 
# Protocol ref: https://gitlab.freedesktop.org/Per_Bothner/specifications/blob/master/proposals/semantic-prompts.md __superset_prompt_mark() { - printf "\\033]777;superset-shell-ready\\007\\033]133;A\\007" + printf "\\033]133;A\\007" } # Keep our hook LAST so it fires after direnv and other precmd hooks complete. precmd_functions=(\${precmd_functions[@]} __superset_prompt_mark) @@ -273,10 +268,10 @@ ${buildPathPrependFunction(paths.BIN_DIR)} hash -r 2>/dev/null || true # Minimal prompt (path/env shown in toolbar) - emerald to match app theme export PS1=$'\\[\\e[1;38;2;52;211;153m\\]❯\\[\\e[0m\\] ' -# Shell readiness markers — see zsh wrapper for rationale on emitting both. +# OSC 133;A prompt marker (FinalTerm standard) — signals shell readiness. # Protocol ref: https://gitlab.freedesktop.org/Per_Bothner/specifications/blob/master/proposals/semantic-prompts.md __superset_prompt_mark() { - printf "\\033]777;superset-shell-ready\\007\\033]133;A\\007" + printf "\\033]133;A\\007" } # Hook via PROMPT_COMMAND. Supports both scalar and array forms (Bash 5.1+). if [[ "$(declare -p PROMPT_COMMAND 2>/dev/null)" == "declare -a"* ]]; then @@ -320,8 +315,7 @@ export function getShellArgs( if (shellName === "fish") { // Use --init-command to prepend BIN_DIR to PATH after config is loaded. // Use fish list-aware checks to avoid duplicate PATH entries across nested shells. - // Emit both OSC 777 (legacy v1 daemon) and OSC 133;A (current scanner) - // on fish_prompt. See zsh wrapper for rationale. + // OSC 133;A emitted on fish_prompt — signals shell readiness. 
const escapedBinDir = escapeFishDoubleQuoted(paths.BIN_DIR); return [ "-l", @@ -331,7 +325,7 @@ export function getShellArgs( `contains -- "$_superset_bin" $PATH`, `or set -gx PATH "$_superset_bin" $PATH`, `function _superset_prompt_mark --on-event fish_prompt`, - `printf '\\033]777;superset-shell-ready\\007\\033]133;A\\007'`, + `printf '\\033]133;A\\007'`, `end`, ].join("; "), ]; diff --git a/apps/desktop/src/main/lib/agent-setup/templates/codex-wrapper-exec.template.sh b/apps/desktop/src/main/lib/agent-setup/templates/codex-wrapper-exec.template.sh index 8aa12395105..86130c5e18d 100644 --- a/apps/desktop/src/main/lib/agent-setup/templates/codex-wrapper-exec.template.sh +++ b/apps/desktop/src/main/lib/agent-setup/templates/codex-wrapper-exec.template.sh @@ -1,6 +1,6 @@ -# Codex exposes completion notifications via notify. -# For per-prompt Start notifications and permission requests, watch the TUI -# session log for task_started/exec_command_begin and *_approval_request events. +# Native ~/.codex/hooks.json handles SessionStart/UserPromptSubmit/Stop. +# The wrapper keeps the session-log watcher only for per-prompt Start +# notifications and permission requests inside Superset terminals. if [ -n "$SUPERSET_TAB_ID" ] && [ -f "{{NOTIFY_PATH}}" ]; then export CODEX_TUI_RECORD_SESSION=1 if [ -z "$CODEX_TUI_SESSION_LOG_PATH" ]; then @@ -72,7 +72,7 @@ if [ -n "$SUPERSET_TAB_ID" ] && [ -f "{{NOTIFY_PATH}}" ]; then SUPERSET_CODEX_START_WATCHER_PID=$! fi -"$REAL_BIN" --enable codex_hooks -c 'notify=["bash","{{NOTIFY_PATH}}"]' "$@" +"$REAL_BIN" --enable codex_hooks "$@" SUPERSET_CODEX_STATUS=$? 
if [ -n "$SUPERSET_CODEX_START_WATCHER_PID" ]; then diff --git a/apps/desktop/src/main/lib/agent-setup/templates/copilot-hook.template.sh b/apps/desktop/src/main/lib/agent-setup/templates/copilot-hook.template.sh index cdfb1581084..cda1f05e9d8 100644 --- a/apps/desktop/src/main/lib/agent-setup/templates/copilot-hook.template.sh +++ b/apps/desktop/src/main/lib/agent-setup/templates/copilot-hook.template.sh @@ -29,6 +29,8 @@ printf '{}\n' [ -z "$SUPERSET_TAB_ID" ] && exit 0 +{{SLEEP_INHIBITOR_SNIPPET}} + curl -sG "http://127.0.0.1:${SUPERSET_PORT:-{{DEFAULT_PORT}}}/hook/complete" \ --connect-timeout 1 --max-time 2 \ --data-urlencode "paneId=$SUPERSET_PANE_ID" \ diff --git a/apps/desktop/src/main/lib/agent-setup/templates/cursor-hook.template.sh b/apps/desktop/src/main/lib/agent-setup/templates/cursor-hook.template.sh index f2e2483ffa9..dc13f64b278 100644 --- a/apps/desktop/src/main/lib/agent-setup/templates/cursor-hook.template.sh +++ b/apps/desktop/src/main/lib/agent-setup/templates/cursor-hook.template.sh @@ -27,6 +27,8 @@ fi # cursor-agent runs inside a Superset terminal, so env vars are inherited directly [ -z "$SUPERSET_TAB_ID" ] && exit 0 +{{SLEEP_INHIBITOR_SNIPPET}} + curl -sG "http://127.0.0.1:${SUPERSET_PORT:-{{DEFAULT_PORT}}}/hook/complete" \ --connect-timeout 1 --max-time 2 \ --data-urlencode "paneId=$SUPERSET_PANE_ID" \ diff --git a/apps/desktop/src/main/lib/agent-setup/templates/gemini-hook.template.sh b/apps/desktop/src/main/lib/agent-setup/templates/gemini-hook.template.sh index a54e780c99a..53b25e1ef06 100644 --- a/apps/desktop/src/main/lib/agent-setup/templates/gemini-hook.template.sh +++ b/apps/desktop/src/main/lib/agent-setup/templates/gemini-hook.template.sh @@ -28,6 +28,8 @@ printf '{}\n' # Skip notification if not inside a Superset terminal [ -z "$SUPERSET_TAB_ID" ] && exit 0 +{{SLEEP_INHIBITOR_SNIPPET}} + curl -sG "http://127.0.0.1:${SUPERSET_PORT:-{{DEFAULT_PORT}}}/hook/complete" \ --connect-timeout 1 --max-time 2 \ --data-urlencode 
"paneId=$SUPERSET_PANE_ID" \ diff --git a/apps/desktop/src/main/lib/agent-setup/templates/notify-hook.template.sh b/apps/desktop/src/main/lib/agent-setup/templates/notify-hook.template.sh index 925702abf4b..13de189926f 100644 --- a/apps/desktop/src/main/lib/agent-setup/templates/notify-hook.template.sh +++ b/apps/desktop/src/main/lib/agent-setup/templates/notify-hook.template.sh @@ -53,6 +53,8 @@ fi # This prevents parse failures from causing false completion notifications [ -z "$EVENT_TYPE" ] && exit 0 +{{SLEEP_INHIBITOR_SNIPPET}} + DEBUG_HOOKS_ENABLED="0" if [ -n "$SUPERSET_DEBUG_HOOKS" ]; then case "$SUPERSET_DEBUG_HOOKS" in @@ -68,7 +70,7 @@ elif [ "$SUPERSET_ENV" = "development" ] || [ "$NODE_ENV" = "development" ]; the fi if [ "$DEBUG_HOOKS_ENABLED" = "1" ]; then - echo "[notify-hook] event=$EVENT_TYPE sessionId=$SESSION_ID hookSessionId=$HOOK_SESSION_ID resourceId=$RESOURCE_ID paneId=$SUPERSET_PANE_ID tabId=$SUPERSET_TAB_ID workspaceId=$SUPERSET_WORKSPACE_ID" >&2 + echo "[notify-hook] event=$EVENT_TYPE sessionId=$SESSION_ID hookSessionId=$HOOK_SESSION_ID resourceId=$RESOURCE_ID paneId=$SUPERSET_PANE_ID tabId=$SUPERSET_TAB_ID workspaceId=$SUPERSET_WORKSPACE_ID wrapperPid=$SUPERSET_WRAPPER_PID" >&2 fi # Timeouts prevent blocking agent completion if notification server is unresponsive diff --git a/apps/desktop/src/main/lib/aivis/client.ts b/apps/desktop/src/main/lib/aivis/client.ts new file mode 100644 index 00000000000..4ac7c85a2b1 --- /dev/null +++ b/apps/desktop/src/main/lib/aivis/client.ts @@ -0,0 +1,91 @@ +import { settings } from "@superset/local-db"; +import { localDb } from "../local-db"; + +const BASE_URL = "https://api.aivis-project.com"; + +export class AivisApiKeyMissingError extends Error { + constructor() { + super("Aivis API key is not configured"); + this.name = "AivisApiKeyMissingError"; + } +} + +export class AivisApiError extends Error { + constructor( + readonly status: number, + readonly bodyText: string, + ) { + super(`Aivis API error 
${status}: ${bodyText.slice(0, 300)}`); + this.name = "AivisApiError"; + } +} + +function readApiKey(): string | null { + try { + const row = localDb.select().from(settings).get(); + const key = row?.aivisApiKey?.trim(); + return key || null; + } catch { + return null; + } +} + +export interface AivisFetchInit extends Omit { + query?: Record; + json?: unknown; + /** Override the stored API key (used for validation from a form). */ + apiKey?: string | null; + /** If true, do not require an API key (for public endpoints like model search). */ + optionalAuth?: boolean; +} + +/** + * Authorized fetch wrapper for the Aivis Cloud API. + * Throws AivisApiKeyMissingError if no key is configured, and AivisApiError + * on non-2xx responses. + */ +export async function aivisFetch( + path: string, + init: AivisFetchInit = {}, +): Promise { + const key = init.apiKey ?? readApiKey(); + if (!key && !init.optionalAuth) throw new AivisApiKeyMissingError(); + + const url = new URL(path, BASE_URL); + for (const [k, v] of Object.entries(init.query ?? 
{})) { + if (v !== undefined && v !== null) url.searchParams.set(k, String(v)); + } + + const headers: Record = { + Accept: "application/json", + ...(init.headers as Record | undefined), + }; + if (key) headers.Authorization = `Bearer ${key}`; + + let body: BodyInit | undefined; + if (init.json !== undefined) { + headers["Content-Type"] = "application/json"; + body = JSON.stringify(init.json); + } + + const res = await fetch(url, { + ...init, + headers, + body, + }); + + if (!res.ok) { + const text = await res.text().catch(() => ""); + throw new AivisApiError(res.status, text); + } + + return res; +} + +export async function aivisJson( + path: string, + init: AivisFetchInit = {}, +): Promise { + const res = await aivisFetch(path, init); + return (await res.json()) as T; +} diff --git a/apps/desktop/src/main/lib/app-state/index.ts b/apps/desktop/src/main/lib/app-state/index.ts index 00e9fe790f5..68b2fddce1a 100644 --- a/apps/desktop/src/main/lib/app-state/index.ts +++ b/apps/desktop/src/main/lib/app-state/index.ts @@ -1,5 +1,8 @@ import { JSONFilePreset } from "lowdb/node"; -import { APP_STATE_PATH } from "../app-environment"; +import { + APP_STATE_PATH, + ensureSupersetHomeDirExists, +} from "../app-environment"; import type { AppState } from "./schemas"; import { defaultAppState } from "./schemas"; @@ -7,6 +10,33 @@ type AppStateDB = Awaited>>; let _appState: AppStateDB | null = null; +function isMissingPathError(error: unknown): boolean { + return error instanceof Error && "code" in error && error.code === "ENOENT"; +} + +function withWriteRetry(appStateDb: AppStateDB): AppStateDB { + const originalWrite = appStateDb.write.bind(appStateDb); + + appStateDb.write = async () => { + // The Superset home directory can disappear after startup. Recreate it before + // each write and retry once on ENOENT so app-state persistence self-heals. 
+ ensureSupersetHomeDirExists(); + + try { + await originalWrite(); + } catch (error) { + if (!isMissingPathError(error)) { + throw error; + } + + ensureSupersetHomeDirExists(); + await originalWrite(); + } + }; + + return appStateDb; +} + /** * Ensures loaded data has the correct shape by merging with defaults. * Handles legacy app-state.json files that may have a different structure @@ -30,13 +60,20 @@ function ensureValidShape(data: Partial): AppState { ...(data.hotkeysState?.byPlatform ?? {}), }, }, + vibrancyState: { + ...defaultAppState.vibrancyState, + ...(data.vibrancyState ?? {}), + }, }; } export async function initAppState(): Promise { if (_appState) return; - _appState = await JSONFilePreset(APP_STATE_PATH, defaultAppState); + ensureSupersetHomeDirExists(); + _appState = withWriteRetry( + await JSONFilePreset(APP_STATE_PATH, defaultAppState), + ); // Reshape data to ensure it has the correct structure (handles legacy formats) _appState.data = ensureValidShape(_appState.data); diff --git a/apps/desktop/src/main/lib/app-state/schemas.ts b/apps/desktop/src/main/lib/app-state/schemas.ts index d381e08b2f9..48af24876a6 100644 --- a/apps/desktop/src/main/lib/app-state/schemas.ts +++ b/apps/desktop/src/main/lib/app-state/schemas.ts @@ -3,6 +3,10 @@ */ import type { BaseTabsState } from "shared/tabs-types"; import type { Theme } from "shared/themes"; +import { + DEFAULT_VIBRANCY_STATE, + type VibrancyState, +} from "shared/vibrancy-types"; // Re-export for convenience export type { BaseTabsState as TabsState, Pane } from "shared/tabs-types"; @@ -24,6 +28,7 @@ export interface AppState { tabsState: BaseTabsState; themeState: ThemeState; hotkeysState: LegacyHotkeysState; + vibrancyState: VibrancyState; } export const defaultAppState: AppState = { @@ -44,4 +49,5 @@ export const defaultAppState: AppState = { version: 1, byPlatform: { darwin: {}, win32: {}, linux: {} }, }, + vibrancyState: DEFAULT_VIBRANCY_STATE, }; diff --git 
a/apps/desktop/src/main/lib/auto-updater.test.ts b/apps/desktop/src/main/lib/auto-updater.test.ts deleted file mode 100644 index 6788b44e54c..00000000000 --- a/apps/desktop/src/main/lib/auto-updater.test.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { beforeEach, describe, expect, mock, test } from "bun:test"; -import { EventEmitter } from "node:events"; - -class FakeAutoUpdater extends EventEmitter { - autoDownload = false; - autoInstallOnAppQuit = false; - disableDifferentialDownload = false; - allowDowngrade = false; - setFeedURL = mock(() => {}); - checkForUpdates = mock(() => Promise.resolve(null)); - quitAndInstall = mock(() => {}); -} - -const fakeAutoUpdater = new FakeAutoUpdater(); - -mock.module("electron-updater", () => ({ - autoUpdater: fakeAutoUpdater, -})); - -mock.module("electron", () => ({ - app: { - getPath: mock(() => ""), - getName: mock(() => "test-app"), - getVersion: mock(() => "1.0.0"), - getAppPath: mock(() => ""), - isPackaged: false, - isReady: mock(() => true), - whenReady: mock(() => Promise.resolve()), - }, - dialog: { - showMessageBox: mock(() => Promise.resolve({ response: 0 })), - }, -})); - -mock.module("main/index", () => ({ - setSkipQuitConfirmation: mock(() => {}), -})); - -// auto-updater short-circuits setupAutoUpdater on non-mac/linux hosts, so -// pin the platform here to keep the tests portable across CI runners. 
-mock.module("shared/constants", () => ({ - PLATFORM: { IS_MAC: true, IS_WINDOWS: false, IS_LINUX: false }, -})); - -const autoUpdater = await import("./auto-updater"); -const { AUTO_UPDATE_STATUS } = await import("shared/auto-update"); - -describe("installUpdate", () => { - beforeEach(() => { - fakeAutoUpdater.removeAllListeners(); - fakeAutoUpdater.quitAndInstall.mockClear(); - fakeAutoUpdater.checkForUpdates.mockClear(); - fakeAutoUpdater.setFeedURL.mockClear(); - autoUpdater.setupAutoUpdater(); - // The module is a singleton; emit a network-shaped error so the - // handler resets isInstalling and maps status back to IDLE without - // tripping the real ERROR path (which would also clear the cache). - fakeAutoUpdater.emit("error", new Error("ECONNRESET reset")); - }); - - test("ignores install requests when no update is ready", () => { - expect(autoUpdater.getUpdateStatus().status).not.toBe( - AUTO_UPDATE_STATUS.READY, - ); - - autoUpdater.installUpdate(); - - expect(fakeAutoUpdater.quitAndInstall).not.toHaveBeenCalled(); - }); - - test("collapses repeat install clicks into a single quitAndInstall call", () => { - fakeAutoUpdater.emit("update-downloaded", { version: "9.9.9" }); - expect(autoUpdater.getUpdateStatus().status).toBe(AUTO_UPDATE_STATUS.READY); - - autoUpdater.installUpdate(); - autoUpdater.installUpdate(); - autoUpdater.installUpdate(); - - expect(fakeAutoUpdater.quitAndInstall).toHaveBeenCalledTimes(1); - }); - - test("clears the in-flight guard when Squirrel surfaces an error", () => { - fakeAutoUpdater.emit("update-downloaded", { version: "9.9.9" }); - autoUpdater.installUpdate(); - expect(fakeAutoUpdater.quitAndInstall).toHaveBeenCalledTimes(1); - - fakeAutoUpdater.emit("error", new Error("squirrel failed")); - fakeAutoUpdater.emit("update-downloaded", { version: "9.9.9" }); - autoUpdater.installUpdate(); - - expect(fakeAutoUpdater.quitAndInstall).toHaveBeenCalledTimes(2); - }); -}); diff --git a/apps/desktop/src/main/lib/auto-updater.ts 
b/apps/desktop/src/main/lib/auto-updater.ts index 74535630215..c14ed487036 100644 --- a/apps/desktop/src/main/lib/auto-updater.ts +++ b/apps/desktop/src/main/lib/auto-updater.ts @@ -2,8 +2,8 @@ import { EventEmitter } from "node:events"; import { app, dialog } from "electron"; import { autoUpdater } from "electron-updater"; import { env } from "main/env.main"; -import { setSkipQuitConfirmation } from "main/index"; -import { prerelease } from "semver"; +import { prepareQuit } from "main/index"; +import { gt, prerelease, valid } from "semver"; import { AUTO_UPDATE_STATUS, type AutoUpdateStatus } from "shared/auto-update"; import { PLATFORM } from "shared/constants"; @@ -43,6 +43,17 @@ function isPrereleaseBuild(): boolean { const IS_PRERELEASE = isPrereleaseBuild(); const IS_AUTO_UPDATE_PLATFORM = PLATFORM.IS_MAC || PLATFORM.IS_LINUX; +// Fork builds use GitHub API to check for new releases instead of electron-updater. +// electron-updater's feed URL points to superset-sh/superset which doesn't +// distribute binaries for this fork, causing the UI to get stuck on +// "Downloading update..." indefinitely. +const IS_FORK = true; + +const FORK_OWNER = "MocA-Love"; +const FORK_REPO = "superset"; +const FORK_RELEASES_URL = `https://github.com/${FORK_OWNER}/${FORK_REPO}/releases`; +const FORK_API_URL = `https://api.github.com/repos/${FORK_OWNER}/${FORK_REPO}/releases/latest`; + // Use explicit feed URLs to ensure we always fetch platform-specific manifests // (for example latest-mac.yml and latest-linux.yml) from the correct release. 
// - Stable: fetches from /releases/latest/download/ (latest non-prerelease) @@ -82,7 +93,6 @@ function isNetworkError(error: Error | string): boolean { let currentStatus: AutoUpdateStatus = AUTO_UPDATE_STATUS.IDLE; let currentVersion: string | undefined; let isDismissed = false; -let isInstalling = false; function emitStatus( status: AutoUpdateStatus, @@ -107,30 +117,17 @@ export function getUpdateStatus(): AutoUpdateStatusEvent { } export function installUpdate(): void { - if (env.NODE_ENV === "development") { - console.info("[auto-updater] Install skipped in dev mode"); + if (IS_FORK) { + import("electron") + .then(({ shell }) => shell.openExternal(FORK_RELEASES_URL)) + .catch(() => {}); emitStatus(AUTO_UPDATE_STATUS.IDLE); return; } - // MacUpdater.quitAndInstall() registers a fresh native-updater - // `update-downloaded` listener each time it runs before Squirrel.Mac has - // finished staging. Without this guard, repeat clicks fan out into - // parallel quitAndInstall calls once Squirrel fires — racing to swap - // the binary and leaving the app on the old version. - if (isInstalling) { - console.info( - "[auto-updater] Install already in progress, ignoring duplicate request", - ); - return; - } - if (currentStatus !== AUTO_UPDATE_STATUS.READY) { - console.warn( - `[auto-updater] Install ignored: update not ready (status=${currentStatus})`, - ); - return; - } - isInstalling = true; - setSkipQuitConfirmation(); + // quitAndInstall internally calls app.quit() — use "stop" mode so + // before-quit doesn't prevent exit on macOS with active host services. + // "release" would keep services alive and block the quit. 
+ prepareQuit("stop"); autoUpdater.quitAndInstall(false, true); } @@ -139,8 +136,125 @@ export function dismissUpdate(): void { autoUpdateEmitter.emit("status-changed", { status: AUTO_UPDATE_STATUS.IDLE }); } +// ── Fork: GitHub API release check ────────────────────────────────────────── + +async function fetchLatestForkRelease(): Promise { + const { net } = await import("electron"); + return new Promise((resolve, reject) => { + const request = net.request({ url: FORK_API_URL, method: "GET" }); + request.setHeader("Accept", "application/vnd.github+json"); + request.setHeader("User-Agent", "Superset-Desktop"); + + let data = ""; + request.on("response", (response) => { + if (response.statusCode !== 200) { + reject(new Error(`GitHub API returned ${response.statusCode}`)); + return; + } + response.on("data", (chunk) => { + data += chunk.toString(); + }); + response.on("end", () => { + try { + const release = JSON.parse(data) as { tag_name: string }; + // Strip "v" or "desktop-v" prefix from tag + const version = release.tag_name.replace(/^(desktop-)?v/, ""); + resolve(valid(version) ? 
version : null); + } catch { + reject(new Error("Failed to parse GitHub API response")); + } + }); + }); + request.on("error", reject); + request.end(); + }); +} + +async function checkForkForUpdates(interactive: boolean): Promise { + emitStatus(AUTO_UPDATE_STATUS.CHECKING); + + try { + const latestVersion = await fetchLatestForkRelease(); + const currentAppVersion = app.getVersion(); + + if (!latestVersion) { + console.info("[auto-updater:fork] Could not determine latest version"); + emitStatus(AUTO_UPDATE_STATUS.IDLE); + if (interactive) { + dialog.showMessageBox({ + type: "info", + title: "Updates", + message: "Could not determine the latest version.", + }); + } + return; + } + + console.info( + `[auto-updater:fork] Current: ${currentAppVersion}, Latest: ${latestVersion}`, + ); + + if (gt(latestVersion, currentAppVersion)) { + console.info( + `[auto-updater:fork] Update available: ${currentAppVersion} → ${latestVersion}`, + ); + emitStatus(AUTO_UPDATE_STATUS.READY, latestVersion); + } else { + console.info("[auto-updater:fork] Already up to date"); + emitStatus(AUTO_UPDATE_STATUS.IDLE); + if (interactive) { + dialog.showMessageBox({ + type: "info", + title: "No Updates", + message: "You're up to date!", + detail: `Version ${currentAppVersion} is the latest version.`, + }); + } + } + } catch (error) { + const err = error instanceof Error ? error : new Error(String(error)); + if (isNetworkError(err)) { + console.info("[auto-updater:fork] Network unavailable, will retry later"); + emitStatus(AUTO_UPDATE_STATUS.IDLE); + if (interactive) { + dialog.showMessageBox({ + type: "info", + title: "No Internet Connection", + message: + "Unable to check for updates. 
Please check your internet connection.", + }); + } + return; + } + console.error( + "[auto-updater:fork] Failed to check for updates:", + err.message, + ); + emitStatus(AUTO_UPDATE_STATUS.ERROR, undefined, err.message); + if (interactive) { + dialog.showMessageBox({ + type: "error", + title: "Update Error", + message: "Failed to check for updates. Please try again later.", + }); + } + } +} + +// ── Public check functions ────────────────────────────────────────────────── + export function checkForUpdates(): void { - if (env.NODE_ENV === "development" || !IS_AUTO_UPDATE_PLATFORM) { + if (env.NODE_ENV === "development") { + return; + } + + if (IS_FORK) { + isDismissed = false; + void checkForkForUpdates(false); + return; + } + + if (!IS_AUTO_UPDATE_PLATFORM) { return; } isDismissed = false; @@ -165,6 +279,13 @@ export function checkForUpdatesInteractive(): void { }); return; } + + if (IS_FORK) { + isDismissed = false; + void checkForkForUpdates(true); + return; + } + if (!IS_AUTO_UPDATE_PLATFORM) { dialog.showMessageBox({ type: "info", @@ -215,6 +336,8 @@ export function checkForUpdatesInteractive(): void { }); } +// ── Dev simulation helpers ────────────────────────────────────────────────── + export function simulateUpdateReady(): void { if (env.NODE_ENV !== "development") return; isDismissed = false; @@ -237,13 +360,45 @@ export function simulateError(): void { ); } +// ── Setup ─────────────────────────────────────────────────────────────────── + export function setupAutoUpdater(): void { - if (env.NODE_ENV === "development" || !IS_AUTO_UPDATE_PLATFORM) { + if (env.NODE_ENV === "development") { + return; + } + + // Fork builds: periodic GitHub API check (no electron-updater) + if (IS_FORK) { + console.info( + `[auto-updater:fork] Initialized: version=${app.getVersion()}, checking ${FORK_API_URL}`, + ); + + const interval = setInterval(checkForUpdates, UPDATE_CHECK_INTERVAL_MS); + interval.unref(); + + if (app.isReady()) { + void checkForUpdates(); + } else { + 
app + .whenReady() + .then(() => checkForUpdates()) + .catch((error) => { + console.error( + "[auto-updater:fork] Failed to start update checks:", + error, + ); + }); + } + return; + } + + // Upstream builds: electron-updater (macOS / Linux only) + if (!IS_AUTO_UPDATE_PLATFORM) { return; } - autoUpdater.autoDownload = true; - autoUpdater.autoInstallOnAppQuit = true; + autoUpdater.autoDownload = false; + autoUpdater.autoInstallOnAppQuit = false; autoUpdater.disableDifferentialDownload = true; // Allow downgrade for prerelease builds so users can switch back to stable @@ -261,8 +416,6 @@ export function setupAutoUpdater(): void { ); autoUpdater.on("error", (error) => { - // Allow retry if Squirrel surfaces an error instead of actually quitting. - isInstalling = false; if (isNetworkError(error)) { console.info("[auto-updater] Network unavailable, will retry later"); emitStatus(AUTO_UPDATE_STATUS.IDLE); diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-filter-proxy.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-filter-proxy.ts new file mode 100644 index 00000000000..ad0b6606788 --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-filter-proxy.ts @@ -0,0 +1,1218 @@ +import { randomBytes } from "node:crypto"; +import type { IncomingMessage, ServerResponse } from "node:http"; +import type { Duplex } from "node:stream"; +import { webContents as electronWebContents } from "electron"; +import { type RawData, WebSocket, type WebSocketServer } from "ws"; +import { browserManager } from "../browser/browser-manager"; +import { resolveCdpPort } from "./cdp-port"; +import { + checkMethodPermitted, + isPrivilegedSchemeAllowed, + permissionStore, +} from "./permissions"; + +/** + * CDP filter helpers reused by the single-port gateway (cdp-gateway.ts). + * + * These functions translate a loopback client's browser-level or + * page-level CDP session into one that only sees the bound pane's + * targets. 
Session routing lives in the gateway — this module is + * concerned with *message-level* filtering once the peer-PID → session + * → bound pane lookup has already happened. + * + * Invariants the gateway relies on: + * • `/devtools/browser/` → proxyBrowserUpgrade. The client + * observes only targetIds in the pane's bound set. `Target.*` + * methods that would affect unrelated targets or tear down the + * browser are rejected with a CDP error. + * • `/devtools/page/` → proxyPageUpgrade. Transparent forward + * of the single page session; scope is already enforced by the + * gateway's targetId check. + * • `Target.setAutoAttach` has its `filter` stripped so + * puppeteer-based clients (chrome-devtools-mcp) don't hang when + * Electron's Chromium doesn't expose a `tab` wrapper above the + * page. + * • Session-scoped frames are admitted only for sessionIds we + * surfaced via `Target.attachedToTarget`; nested attach events + * add their child sessionId transitively, which is required for + * workers / OOPIF / prerender sub-sessions. + */ + +export function sendJson( + res: ServerResponse, + status: number, + body: unknown, +): void { + res.statusCode = status; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify(body)); +} + +export async function fetchUpstreamJson(path: string): Promise { + const port = await resolveCdpPort(); + if (!port) throw new Error("Chromium CDP port not available"); + const res = await fetch(`http://127.0.0.1:${port}${path}`); + if (!res.ok) { + throw new Error( + `Chromium CDP returned ${res.status} for ${path}: ${await res + .text() + .catch(() => "")}`, + ); + } + return (await res.json()) as unknown; +} + +/** + * Per-session identifier used in `/devtools/browser/` so the URL + * the client sees has the same shape Chromium itself would hand out. 
+ */ +const browserWsIds = new Map(); + +export function browserWsIdFor(sessionId: string): string { + let id = browserWsIds.get(sessionId); + if (!id) { + id = randomBytes(16).toString("hex"); + browserWsIds.set(sessionId, id); + } + return id; +} + +export async function proxyPageUpgrade( + req: IncomingMessage, + socket: Duplex, + head: Buffer, + wss: WebSocketServer, + chromiumPort: number, + targetId: string, +): Promise { + wss.handleUpgrade(req, socket, head, (clientWs) => { + const upstream = new WebSocket( + `ws://127.0.0.1:${chromiumPort}/devtools/page/${targetId}`, + ); + const closeBoth = (): void => { + try { + clientWs.close(); + } catch { + /* ignore */ + } + try { + upstream.close(); + } catch { + /* ignore */ + } + }; + const pending: Array[0]> = []; + clientWs.on("message", (data) => { + if (upstream.readyState === WebSocket.OPEN) { + upstream.send(data); + } else { + pending.push(data as Parameters[0]); + } + }); + upstream.on("open", () => { + for (const buf of pending) upstream.send(buf); + pending.length = 0; + upstream.on("message", (data) => { + if (clientWs.readyState === WebSocket.OPEN) clientWs.send(data); + }); + }); + upstream.on("error", (err) => { + console.warn("[cdp-filter-proxy] page upstream error", err); + closeBoth(); + }); + upstream.on("close", closeBoth); + clientWs.on("error", closeBoth); + clientWs.on("close", closeBoth); + }); +} + +interface JsonRpcMsg { + id?: number; + method?: string; + params?: Record; + result?: Record; + error?: { code: number; message: string }; + sessionId?: string; +} + +/** + * Binding contract for the browser-level filter. + * + * Single-pane (M1) passes `primaryTargetId` as the one bound target + * and returns `{primaryTargetId}` from boundTargetIds(). Multi-tab + * (M2) returns the full Set of tab targetIds for the pane; the filter + * does not need to know about tabs as such. 
+ */ +export interface BoundContext { + paneId: string; + primaryTargetId: string; + /** Current set of bound targetIds. Re-evaluated on each filter hit. */ + boundTargetIds(): ReadonlySet; + /** Optional: WS that should be closed when the bound set changes. */ + onClose?: (ws: WebSocket) => void; +} + +function targetIdOf(obj: unknown): string | undefined { + if (typeof obj !== "object" || obj === null) return undefined; + const t = (obj as { targetId?: unknown }).targetId; + return typeof t === "string" ? t : undefined; +} + +/** + * Electron's BrowserView / shows up in Chromium's CDP as + * `type: "webview"`. puppeteer-core's `browser.pages()` filters by + * `type === "page"`, so the unchanged type would make chrome-devtools- + * mcp's `list_pages` / `evaluate_script` see zero pages even when the + * bound pane is alive. We rewrite "webview" to "page" on the way out + * so external CDP clients treat the pane as a normal page target. + */ +function rewriteTargetInfoType( + info: T, +): T { + if (!info) return info; + if (info.type === "webview") { + return { ...info, type: "page" } as T; + } + return info; +} + +function shortSid(sid: string | undefined): string { + if (!sid) return "(root)"; + return sid.slice(0, 8); +} + +function shortTid(tid: string | undefined): string { + if (!tid) return "?"; + return tid.slice(0, 8); +} + +function summarizeParams(method: string, params: unknown): string { + if (!params || typeof params !== "object") return ""; + const p = params as Record; + const parts: string[] = []; + if (typeof p.url === "string") parts.push(`url=${p.url.slice(0, 60)}`); + if (typeof p.targetId === "string") parts.push(`tid=${shortTid(p.targetId)}`); + if (typeof p.sessionId === "string") + parts.push(`sid=${shortSid(p.sessionId)}`); + if (typeof p.expression === "string") + parts.push(`js=${p.expression.slice(0, 40)}…`); + if (typeof p.text === "string") parts.push(`text=${p.text.slice(0, 30)}`); + if (typeof p.x === "number" && typeof p.y === 
"number") + parts.push(`xy=${p.x},${p.y}`); + if (typeof p.newWindow === "boolean") parts.push(`newWindow=${p.newWindow}`); + if (typeof p.background === "boolean") + parts.push(`background=${p.background}`); + // Input/keyboard/focus diagnostics for the "keys leaked into the + // Superset terminal pane" bug. Knowing whether the MCP sent the + // event as root-scoped vs child-session-scoped is the key signal: + // root-scoped Input.* targets whichever webContents currently + // holds OS focus in the host BrowserWindow, which on a + // multi-pane Superset window can be the xterm.js terminal pane + // instead of the bound . + if (method.startsWith("Input.")) { + if (typeof p.type === "string") parts.push(`ev=${p.type}`); + if (typeof p.key === "string") parts.push(`key=${p.key}`); + if (typeof p.code === "string") parts.push(`code=${p.code}`); + if (typeof p.windowsVirtualKeyCode === "number") + parts.push(`vk=${p.windowsVirtualKeyCode}`); + if (typeof p.button === "string") parts.push(`btn=${p.button}`); + } + void method; + return parts.join(" "); +} + +/** + * Method names whose dispatch we want explicit, high-visibility + * logging for. These are the CDP calls implicated in the reported + * "MCP typed into the terminal pane instead of the browser" bug: + * - Input.* delivers synthetic keyboard/mouse events. If sent on the + * root session (no `sessionId` / `msg.sessionId`) they hit the host + * BrowserWindow's focused webContents, which may be Superset's own + * terminal pane rather than the bound . + * - Page.bringToFront / Target.activateTarget can pull OS focus over + * from the bound onto another webContents, setting up + * the focus-miss for the next Input.* call. + * - DOM.focus / Runtime.evaluate("element.focus()") can have the + * same effect but are harder to detect; we log them at normal + * verbosity only. 
+ */ +function isFocusAffectingMethod(method: string): boolean { + return ( + method.startsWith("Input.") || + method === "Page.bringToFront" || + method === "Target.activateTarget" || + method === "Emulation.setFocusEmulationEnabled" + ); +} + +let cdpConnSeq = 0; + +export async function proxyBrowserUpgrade( + req: IncomingMessage, + socket: Duplex, + head: Buffer, + wss: WebSocketServer, + chromiumPort: number, + ctx: BoundContext, +): Promise { + cdpConnSeq += 1; + const connId = cdpConnSeq; + console.log( + `[cdp #${connId}] proxyBrowserUpgrade pane=${ctx.paneId} primary=${shortTid( + ctx.primaryTargetId, + )} bound=${Array.from(ctx.boundTargetIds()).map(shortTid).join(",")}`, + ); + let chromiumBrowserWs: string; + try { + const ver = (await fetchUpstreamJson("/json/version")) as { + webSocketDebuggerUrl?: string; + }; + if (!ver.webSocketDebuggerUrl) { + socket.destroy(); + return; + } + const parsed = new URL(ver.webSocketDebuggerUrl); + parsed.host = `127.0.0.1:${chromiumPort}`; + chromiumBrowserWs = parsed.toString(); + } catch (error) { + console.warn("[cdp-filter-proxy] could not resolve browser WS:", error); + socket.destroy(); + return; + } + + wss.handleUpgrade(req, socket, head, (clientWs) => { + const upstream = new WebSocket(chromiumBrowserWs); + const pendingMethods = new Map(); + const allowedSessionIds = new Set(); + // Track the targetId each admitted child session belongs to so + // session-scoped methods (Page.bringToFront, Page.navigate, …) + // can be routed back to a pane when UI bridging is needed. + const sessionIdToTargetId = new Map(); + // Allowed targetIds: starts as the bound set, but grows + // transitively: any target whose `openerId` is already allowed + // is admitted too. Without this, real children of the bound + // page (popups, OOPIF iframes, dedicated/service workers + // scoped to the page, prerender) are dropped at the root + // filter even though clients legitimately need to interact + // with them. 
childToParent lets us prune the closure when a + // target is destroyed. + const allowedTargetIds = new Set(ctx.boundTargetIds()); + const childToParent = new Map(); + // Internal requests the proxy issues (e.g. to proactively + // attach to a newly-spawned secondary tab so Chromium emits + // Target.attachedToTarget even when its own auto-attach logic + // skips Electron -sourced targets). Responses with + // these ids must NOT reach the client because it never knew + // about them. + const INTERNAL_REQ_BASE = 0x7fff_0000; + let internalReqSeq = 0; + const internalPendingIds = new Set(); + // Timers for in-flight Target.createTarget waits; cleared on + // WS close so a tardy renderer response can't leak timers / + // resolve a dead connection. + const pendingCreateTimers = new Set(); + const refreshBound = (): void => { + for (const tid of ctx.boundTargetIds()) allowedTargetIds.add(tid); + }; + const isAllowedTarget = ( + info: { targetId?: string; openerId?: string } | undefined, + fallbackTargetId?: string, + ): boolean => { + refreshBound(); + const tid = info?.targetId ?? fallbackTargetId; + if (!tid) return false; + if (allowedTargetIds.has(tid)) return true; + const opener = info?.openerId; + if (opener && allowedTargetIds.has(opener)) { + allowedTargetIds.add(tid); + childToParent.set(tid, opener); + return true; + } + return false; + }; + const dropTarget = (tid: string | undefined): void => { + if (!tid) return; + // Recursively drop descendants: childToParent maps child + // targetId -> parent targetId, so any entry whose parent + // is (transitively) tid should also be evicted. Without + // this, popup/worker/prerender children of a closed tab + // stay in allowedTargetIds forever, leaking scope + // long-term on long-lived MCP connections. 
+ const queue: string[] = [tid]; + const visited = new Set(); + while (queue.length > 0) { + const cur = queue.shift() as string; + if (visited.has(cur)) continue; + visited.add(cur); + allowedTargetIds.delete(cur); + for (const [child, parent] of childToParent) { + if (parent === cur && !visited.has(child)) queue.push(child); + } + childToParent.delete(cur); + } + // Also clear any childToParent entry whose parent is now gone. + for (const [child, parent] of Array.from(childToParent)) { + if (visited.has(parent)) childToParent.delete(child); + } + }; + // Frames the client sends before Chromium's browser WS reaches + // OPEN. CDP clients (puppeteer, cdp-use) typically fire + // `Target.setDiscoverTargets` / `Target.setAutoAttach` the + // instant our handshake completes, so dropping them would + // deadlock `connect()`. `proxyPageUpgrade` buffers the same way. + const pendingUpstream: unknown[] = []; + if (ctx.onClose) ctx.onClose(clientWs); + + const closeBoth = (): void => { + for (const t of pendingCreateTimers) clearTimeout(t); + pendingCreateTimers.clear(); + try { + clientWs.close(); + } catch { + /* ignore */ + } + try { + upstream.close(); + } catch { + /* ignore */ + } + }; + + const sendToClient = (obj: unknown): void => { + if (clientWs.readyState !== WebSocket.OPEN) return; + try { + clientWs.send(JSON.stringify(obj)); + } catch { + /* ignore */ + } + }; + const sendToUpstream = (obj: unknown): void => { + if (upstream.readyState === WebSocket.CONNECTING) { + pendingUpstream.push(obj); + return; + } + if (upstream.readyState !== WebSocket.OPEN) return; + try { + upstream.send(JSON.stringify(obj)); + } catch { + /* ignore */ + } + }; + upstream.on("open", () => { + for (const obj of pendingUpstream) { + try { + upstream.send(JSON.stringify(obj)); + } catch { + /* ignore */ + } + } + pendingUpstream.length = 0; + }); + // Use -32000 (server error) instead of -32601 (method not + // found) so CDP clients (puppeteer / cdp-use) treat the + // rejection as a 
normal protocol failure instead of falling + // back to "method does not exist on this version of Chromium" + // retry/normalization paths. + const rejectRequest = (id: number, message: string): void => { + sendToClient({ id, error: { code: -32000, message } }); + }; + + clientWs.on("message", (data: RawData) => { + let msg: JsonRpcMsg; + try { + msg = JSON.parse(data.toString()) as JsonRpcMsg; + } catch { + return; + } + const id = typeof msg.id === "number" ? msg.id : undefined; + const method = msg.method ?? ""; + const summary = summarizeParams(method, msg.params); + const paramsSid = + typeof (msg.params as { sessionId?: unknown } | undefined) + ?.sessionId === "string" + ? ((msg.params as { sessionId?: string }).sessionId as string) + : undefined; + if (isFocusAffectingMethod(method)) { + // High-visibility log line for focus/input-related calls. + // We care specifically whether the MCP carried the event + // on the webview's child session (msg.sessionId set, or + // params.sessionId set for non-flatten paths) or on the + // root session. Root-scoped Input.* is the smoking gun + // for the terminal-leak bug. + const scope = msg.sessionId + ? `child-flatten sid=${shortSid(msg.sessionId)}` + : paramsSid + ? `child-params sid=${shortSid(paramsSid)}` + : "ROOT"; + console.warn( + `[cdp #${connId}] [focus/input] →up ${method} scope=${scope} id=${id ?? "-"}${summary ? ` ${summary}` : ""}`, + ); + } + if (msg.sessionId) { + const allowed = allowedSessionIds.has(msg.sessionId); + console.log( + `[cdp #${connId}] →up sid=${shortSid(msg.sessionId)} id=${id ?? "-"} ${method}${summary ? ` ${summary}` : ""}${allowed ? "" : " [DROPPED unknown sid]"}`, + ); + if (allowed) { + // Session-scoped messages must still go through the + // permission gate. 
Puppeteer / cdp-use send the + // majority of page-domain commands (Debugger.*, + // Network.*, Storage.*, DOMStorage.*, …) on a child + // session, not the root session, so checking only + // the root path would let permissive-gated methods + // slip through under any preset — including the + // "Secure" default. (Raised in CodeRabbit review on + // PR #371.) + const perm = checkMethodPermitted( + method, + permissionStore.getActiveToggles(), + ); + if (!perm.allowed) { + if (id !== undefined) { + rejectRequest( + id, + perm.reason ?? + `${method} is not permitted by the Superset CDP filter`, + ); + } + return; + } + // Page.bringToFront is session-scoped; surface it as + // a renderer-side tab activation so the tab bar + // follows the MCP. + if (method === "Page.bringToFront") { + const tid = sessionIdToTargetId.get(msg.sessionId); + if (tid) { + const paneForTid = browserManager.getPaneIdForTarget(tid); + if (paneForTid) { + const tabId = browserManager.getTabIdForTarget(paneForTid, tid); + browserManager.requestTabActivation(paneForTid, tabId); + } + } + } + // Electron quirk: session-scoped CDP Input.* events + // SHOULD target the session's renderer, but on some + // platforms Chromium delivers synthetic key/mouse + // events to whichever widget currently holds + // Chromium-internal focus. If the user clicked the + // Superset terminal pane (xterm.js), the browser + // pane's webContents has lost internal focus and + // MCP keystrokes land in the terminal instead of + // the page. Force-focus the bound webContents right + // before forwarding Input.* so the synthetic events + // hit the intended target. 
+ if (method.startsWith("Input.")) { + const tid = sessionIdToTargetId.get(msg.sessionId); + if (tid) { + const paneForTid = browserManager.getPaneIdForTarget(tid); + if (paneForTid) { + const tabId = browserManager.getTabIdForTarget(paneForTid, tid); + const wcId = browserManager.getWebContentsIdForTab( + paneForTid, + tabId, + ); + if (wcId != null) { + const wc = electronWebContents.fromId(wcId); + try { + wc?.focus(); + } catch { + /* webContents may be gone */ + } + } + } + } + } + sendToUpstream(msg); + } else if (id !== undefined) { + rejectRequest( + id, + "The supplied CDP sessionId is not authorized for this Superset session (the session may have been detached or belong to another pane).", + ); + } + return; + } + console.log( + `[cdp #${connId}] →up (root) id=${id ?? "-"} ${method}${summary ? ` ${summary}` : ""}`, + ); + refreshBound(); + // `bound` here is the dynamic allow-list (bound primary + + // pane tab targets + transitively-admitted children of any + // of those via openerId). Children are added by the + // attachedToTarget event handler below; checking against + // this set instead of the static ctx.boundTargetIds() + // admits popups, OOPIF iframes, dedicated/service workers + // scoped to the bound page, and prerender targets that + // puppeteer / cdp-use legitimately need to drive. + const bound = allowedTargetIds; + + // Permission check: consults the active preset's toggles. + // Always-denied methods (Browser.close, Page.close, etc.) + // are baked into permissions.ts; toggle-gated methods flip + // with user-selected preset. + const perm = checkMethodPermitted( + method, + permissionStore.getActiveToggles(), + ); + if (!perm.allowed) { + if (id !== undefined) { + rejectRequest( + id, + perm.reason ?? 
+ `${method} is not permitted by the Superset CDP filter`, + ); + } + return; + } + + // Strip the `filter` field from Target.setAutoAttach AND + // Target.setDiscoverTargets: + // - puppeteer (chrome-devtools-mcp) sends setAutoAttach with + // `[{type:'page', exclude:true}]` waiting for a `tab` + // wrapper Electron does not expose, which would hang + // `connect()`. + // - browser-use (cdp-use) sends setDiscoverTargets with + // `[{type:'page'}]`. Electron's is reported as + // `type:'webview'`, so the bound primary is excluded from + // discovery, Target.getTargets returns no matches, and + // browser-use's SessionManager errors with "Root CDP + // client not initialized" — the user-reported "セッション + // が切れている" symptom. + // Removing the filter forces Chromium to surface every type; + // our downstream Target event/result filter still scopes + // the client's view to bound targetIds and rewrites + // type=webview → page so puppeteer/cdp-use treat it as a + // regular page. + if ( + (method === "Target.setAutoAttach" || + method === "Target.setDiscoverTargets") && + id !== undefined + ) { + const original = (msg.params ?? 
{}) as Record; + const rewritten: Record = { ...original }; + if ("filter" in rewritten) delete rewritten.filter; + pendingMethods.set(id, method); + sendToUpstream({ id, method, params: rewritten }); + return; + } + + if (method === "Target.attachToTarget" && id !== undefined) { + const targetId = targetIdOf(msg.params); + if (targetId && !bound.has(targetId)) { + rejectRequest( + id, + "This Superset session is scoped to the bound pane; attachToTarget for other targets is refused.", + ); + return; + } + pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + if (method === "Target.activateTarget" && id !== undefined) { + const tid = targetIdOf(msg.params); + if (tid && !bound.has(tid)) { + rejectRequest( + id, + "Target.activateTarget for other targets is refused by the Superset CDP filter.", + ); + return; + } + // Bridge MCP-driven tab activation into the renderer + // tab-bar so the user sees the same tab the MCP is + // driving (matches Chrome's tab-strip-follows-CDP + // behaviour). The renderer flips its activeTabId on + // receipt of the activate-tab-requested event. 
+ if (tid) { + const paneForTid = browserManager.getPaneIdForTarget(tid); + if (paneForTid) { + const tabId = browserManager.getTabIdForTarget(paneForTid, tid); + browserManager.requestTabActivation(paneForTid, tabId); + } + } + pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + if (method === "Target.closeTarget" && id !== undefined) { + const tid = targetIdOf(msg.params); + if (!tid || !bound.has(tid)) { + rejectRequest( + id, + "Target.closeTarget for other targets is refused by the Superset CDP filter.", + ); + return; + } + pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + if (method === "Target.createTarget" && id !== undefined) { + const params = msg.params as + | { + url?: string; + background?: boolean; + newWindow?: boolean; + browserContextId?: string; + forTab?: boolean; + } + | undefined; + // Reject the browserContextId / newWindow / forTab + // flavours: Superset doesn't expose multiple browser + // contexts (incognito) and never opens its own OS + // window for an MCP, so honouring those params would + // silently lie to the client about what was created. + // Tell the truth instead so puppeteer / playwright + // surface a clean error. + if ( + params?.browserContextId || + params?.newWindow === true || + params?.forTab !== undefined + ) { + rejectRequest( + id, + "Target.createTarget with browserContextId / newWindow / forTab is not supported by the Superset CDP filter; tabs are always created inside the bound pane.", + ); + return; + } + const nextUrl = + typeof params?.url === "string" && params.url !== "" + ? params.url + : "about:blank"; + // Allow only http(s) and about:blank by default. The + // "privilegedSchemes" permission toggle lets the user + // opt in to file://, chrome://, devtools://, + // javascript:, data: — these either escape the pane + // (privileged schemes) or let the client execute + // arbitrary code with the bound origin's privileges. 
+ if (nextUrl !== "about:blank") { + let parsed: URL | null = null; + try { + parsed = new URL(nextUrl); + } catch { + parsed = null; + } + const scheme = parsed?.protocol ?? ""; + const isSafeScheme = scheme === "http:" || scheme === "https:"; + if ( + !isSafeScheme && + !isPrivilegedSchemeAllowed(permissionStore.getActiveToggles()) + ) { + rejectRequest( + id, + `Target.createTarget url scheme ${scheme || "(invalid)"} requires the "privilegedSchemes" permission toggle.`, + ); + return; + } + } + // Tell the renderer to spawn a real new tab + // for this pane. Wait for browser-manager to register + // the new targetId (via addPaneTabTarget → tab-target- + // added event) before responding, so the MCP gets the + // new tab's id and not the primary's. Without this the + // MCP attaches to whatever id we hand back and ends up + // driving the wrong tab (the user-reported "新しいタブで + // 検索すると最初のタブで検索される" behaviour). + // Correlate this createTarget with the renderer reply + // so concurrent createTarget calls (e.g. browser-use + // and chrome-devtools-mcp opening tabs at the same + // time) don't race each other onto the same new-tab + // event. 
+ const requestId = `req-${connId}-${Date.now().toString(36)}-${id}`;
+ const waitForNewTab = (): Promise<string | null> => {
+ return new Promise<string | null>((resolveTarget) => {
+ const handler = (payload: {
+ requestId?: string;
+ targetId: string;
+ }) => {
+ if (payload.requestId !== requestId) return;
+ browserManager.off(eventName, handler);
+ clearTimeout(timer);
+ pendingCreateTimers.delete(timer);
+ console.log(
+ `[cdp #${connId}] createTarget: new tab targetId=${shortTid(payload.targetId)} (req=${requestId})`,
+ );
+ resolveTarget(payload.targetId);
+ };
+ const eventName = `tab-target-added-for:${ctx.paneId}`;
+ browserManager.on(eventName, handler);
+ const timer = setTimeout(() => {
+ browserManager.off(eventName, handler);
+ pendingCreateTimers.delete(timer);
+ console.warn(
+ `[cdp #${connId}] createTarget: TIMEOUT waiting for new tab req=${requestId}`,
+ );
+ // Surface a real error rather than silently
+ // falling back to the primary targetId. A
+ // fallback makes the client think a new tab
+ // was created and then drive the primary,
+ // which looks like "the MCP opened a tab
+ // but then searched in the old one". 
+ resolveTarget(null); + }, 8000); + pendingCreateTimers.add(timer); + }); + }; + console.log( + `[cdp #${connId}] createTarget: spawning new tab url=${nextUrl} req=${requestId}`, + ); + try { + console.log( + `[tab-diag] createTarget→emit pane=${ctx.paneId} url=${nextUrl} req=${requestId} bg=${params?.background === true}`, + ); + browserManager.emit(`create-tab-requested:${ctx.paneId}`, { + url: nextUrl, + requestId, + background: params?.background === true, + }); + } catch { + /* best effort */ + } + void waitForNewTab().then((newTargetId) => { + if (!newTargetId) { + rejectRequest( + id, + "Target.createTarget timed out waiting for the renderer to spawn a new tab.", + ); + return; + } + console.log( + `[cdp #${connId}] createTarget: responding id=${id} targetId=${shortTid(newTargetId)}`, + ); + // Admit the new tab into the scope so subsequent + // events (attachedToTarget, targetInfoChanged, + // Page.frameNavigated, …) survive the root filter. + allowedTargetIds.add(newTargetId); + childToParent.set(newTargetId, ctx.primaryTargetId); + sendToClient({ id, result: { targetId: newTargetId } }); + // Chromium's auto-attach does not fire for Electron + // targets we just created via the renderer + // side-channel, so puppeteer's newPage() would hang + // forever waiting for Target.attachedToTarget. + // Proactively attach here (via an internal id the + // client will never see a response for) so Chromium + // emits the attachedToTarget event that our filter + // then forwards to the client as if auto-attach had + // produced it. 
+ internalReqSeq += 1; + const internalId = INTERNAL_REQ_BASE + internalReqSeq; + internalPendingIds.add(internalId); + sendToUpstream({ + id: internalId, + method: "Target.attachToTarget", + params: { targetId: newTargetId, flatten: true }, + }); + }); + return; + } + + if (method === "Target.getTargets" || method === "Target.getTargetInfo") { + if (id !== undefined) pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + if (method === "Target.detachFromTarget" && id !== undefined) { + const sid = (msg.params as { sessionId?: string } | undefined) + ?.sessionId; + const tid = targetIdOf(msg.params); + if ((sid && !allowedSessionIds.has(sid)) || (tid && !bound.has(tid))) { + rejectRequest( + id, + "Target.detachFromTarget outside the bound scope is refused by the Superset CDP filter.", + ); + return; + } + pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + // Remaining Target.* methods that address a specific target or + // session (e.g. sendMessageToTarget, setRemoteLocations). Verify + // any supplied targetId / sessionId are inside scope before + // forwarding. 
+ if (method.startsWith("Target.") && id !== undefined) { + const tid = targetIdOf(msg.params); + const sid = (msg.params as { sessionId?: string } | undefined) + ?.sessionId; + if (tid && !bound.has(tid)) { + rejectRequest(id, `${method} targetId is outside the bound scope.`); + return; + } + if (sid && !allowedSessionIds.has(sid)) { + rejectRequest( + id, + `${method} sessionId is not authorized for this Superset session.`, + ); + return; + } + pendingMethods.set(id, method); + sendToUpstream(msg); + return; + } + + if (id !== undefined) pendingMethods.set(id, method); + sendToUpstream(msg); + }); + + upstream.on("message", (data: RawData) => { + let msg: JsonRpcMsg; + try { + msg = JSON.parse(data.toString()) as JsonRpcMsg; + } catch { + return; + } + refreshBound(); + // `bound` here is the dynamic allow-list (bound primary + + // pane tab targets + transitively-admitted children of any + // of those via openerId). Children are added by the + // attachedToTarget event handler below; checking against + // this set instead of the static ctx.boundTargetIds() + // admits popups, OOPIF iframes, dedicated/service workers + // scoped to the bound page, and prerender targets that + // puppeteer / cdp-use legitimately need to drive. + const bound = allowedTargetIds; + const summary = summarizeParams(msg.method ?? "", msg.params); + + if (msg.sessionId) { + if (!allowedSessionIds.has(msg.sessionId)) { + console.log( + `[cdp #${connId}] ←dn sid=${shortSid(msg.sessionId)} ${msg.method ?? "?"} [DROPPED unknown sid]`, + ); + return; + } + console.log( + `[cdp #${connId}] ←dn sid=${shortSid(msg.sessionId)} id=${msg.id ?? "-"} ${msg.method ?? "(response)"}${summary ? ` ${summary}` : ""}`, + ); + if (msg.method === "Target.attachedToTarget") { + // Nested attach (worker / iframe / prerender of a + // target the parent session already owns). 
Trust + // the parent: admit both the child sessionId AND + // the child targetId via openerId so subsequent + // session-scoped frames + root events reach the + // client cleanly. + const params = msg.params as + | { + sessionId?: string; + targetInfo?: { targetId?: string; openerId?: string }; + } + | undefined; + const childSid = params?.sessionId; + if (childSid) allowedSessionIds.add(childSid); + const childTid = params?.targetInfo?.targetId; + if (childTid) { + allowedTargetIds.add(childTid); + const opener = params?.targetInfo?.openerId; + if (opener) childToParent.set(childTid, opener); + } + } else if (msg.method === "Target.detachedFromTarget") { + const params = msg.params as + | { sessionId?: string; targetId?: string } + | undefined; + const childSid = params?.sessionId; + if (childSid) allowedSessionIds.delete(childSid); + const childTid = params?.targetId; + if (childTid) dropTarget(childTid); + } + sendToClient(msg); + return; + } + + if (typeof msg.id === "number") { + if (internalPendingIds.has(msg.id)) { + internalPendingIds.delete(msg.id); + // Proactive Target.attachToTarget reply. The + // resulting Target.attachedToTarget event has + // already been delivered separately and is what + // the client actually cares about; swallow the + // response to avoid forwarding an id the client + // never issued. + const sid = (msg.result as { sessionId?: string } | undefined) + ?.sessionId; + if (sid) allowedSessionIds.add(sid); + console.log( + `[cdp #${connId}] internal attach response id=${msg.id} sid=${shortSid(sid)} consumed`, + ); + return; + } + const origMethod = pendingMethods.get(msg.id); + pendingMethods.delete(msg.id); + console.log( + `[cdp #${connId}] ←dn (root) id=${msg.id} response-to=${origMethod ?? "?"}${msg.error ? ` ERROR=${msg.error.message}` : ""}`, + ); + if (origMethod === "Target.getTargets" && msg.result) { + const infos = (msg.result.targetInfos ?? 
[]) as Array<
+ { type?: string } & Record<string, unknown>
+ >;
+ const filtered = infos
+ .filter((i) => {
+ const tid = targetIdOf(i);
+ return tid !== undefined && bound.has(tid);
+ })
+ .map((i) => rewriteTargetInfoType(i));
+ console.log(
+ "[cdp-filter-proxy] Target.getTargets upstream returned",
+ infos.length,
+ "infos | bound set",
+ Array.from(bound),
+ "| filtered to",
+ filtered.length,
+ "| upstream ids:",
+ infos.map((i) => `${i.type}:${targetIdOf(i)}`),
+ );
+ sendToClient({
+ ...msg,
+ result: { ...msg.result, targetInfos: filtered },
+ });
+ return;
+ }
+ if (origMethod === "Target.attachToTarget" && msg.result) {
+ const sid = (msg.result as { sessionId?: string }).sessionId;
+ if (sid) allowedSessionIds.add(sid);
+ }
+ if (origMethod === "Target.getTargetInfo" && msg.result?.targetInfo) {
+ const tid = targetIdOf(msg.result.targetInfo);
+ if (!tid || !bound.has(tid)) {
+ sendToClient({
+ id: msg.id,
+ error: { code: -32000, message: "target not found" },
+ });
+ return;
+ }
+ sendToClient({
+ ...msg,
+ result: {
+ ...msg.result,
+ targetInfo: rewriteTargetInfoType(
+ msg.result.targetInfo as { type?: string },
+ ),
+ },
+ });
+ return;
+ }
+ sendToClient(msg);
+ return;
+ }
+
+ const method = msg.method ?? "";
+ if (
+ method === "Target.targetCreated" ||
+ method === "Target.targetInfoChanged"
+ ) {
+ // Admit the target if it is in scope OR if its
+ // `openerId` is in scope (popups, child pages, etc.).
+ const params = msg.params as
+ | {
+ targetInfo?: {
+ type?: string;
+ targetId?: string;
+ openerId?: string;
+ attached?: boolean;
+ };
+ targetId?: string;
+ }
+ | undefined;
+ const info = params?.targetInfo;
+ const allowed = isAllowedTarget(info, params?.targetId);
+ const tid = info?.targetId ?? params?.targetId;
+ if (!allowed) {
+ console.log(
+ `[cdp #${connId}] ←dn (root) ${method} tid=${shortTid(tid)} [DROPPED not in bound]`,
+ );
+ return;
+ }
+ console.log(
+ `[cdp #${connId}] ←dn (root) ${method} tid=${shortTid(tid)} type=${info?.type ?? 
"?"}`, + ); + // Chromium's own auto-attach logic can miss Electron- + // hosted targets (webview-derived pages, Service + // Workers / Shared Workers / Dedicated Workers scoped + // to the bound page, popup windows, prerender targets) + // — puppeteer and playwright both only materialise + // Page / Worker / ServiceWorker objects from + // attachedToTarget, so a target that is merely + // announced via targetCreated but never attached stays + // invisible to the MCP. Proactively attach any + // newly-created target whose type is one of the + // relevant kinds and which we have not already + // attached to. Chromium emits the real + // attachedToTarget that our filter then forwards. + const t = info?.type; + if ( + method === "Target.targetCreated" && + tid && + info?.attached !== true && + (t === "page" || + t === "iframe" || + t === "service_worker" || + t === "shared_worker" || + t === "worker" || + t === "prerender" || + t === "webview") + ) { + internalReqSeq += 1; + const internalId = INTERNAL_REQ_BASE + internalReqSeq; + internalPendingIds.add(internalId); + console.log( + `[cdp #${connId}] proactive attach tid=${shortTid(tid)} type=${t} internalId=${internalId}`, + ); + sendToUpstream({ + id: internalId, + method: "Target.attachToTarget", + params: { targetId: tid, flatten: true }, + }); + } + if (info) { + sendToClient({ + ...msg, + params: { + ...params, + targetInfo: rewriteTargetInfoType(info), + }, + }); + } else { + sendToClient(msg); + } + return; + } + if ( + method === "Target.targetDestroyed" || + method === "Target.targetCrashed" + ) { + const params = msg.params as + | { + targetInfo?: { type?: string; targetId?: string }; + targetId?: string; + } + | undefined; + const info = params?.targetInfo; + const tid = info?.targetId ?? 
params?.targetId; + if (!tid || !bound.has(tid)) { + console.log( + `[cdp #${connId}] ←dn (root) ${method} tid=${shortTid(tid)} [DROPPED not in bound]`, + ); + return; + } + console.log( + `[cdp #${connId}] ←dn (root) ${method} tid=${shortTid(tid)} type=${info?.type ?? "?"}`, + ); + dropTarget(tid); + if (info) { + sendToClient({ + ...msg, + params: { + ...params, + targetInfo: rewriteTargetInfoType(info), + }, + }); + } else { + sendToClient(msg); + } + return; + } + if (method === "Target.attachedToTarget") { + const params = msg.params as + | { + sessionId?: string; + targetInfo?: { + type?: string; + targetId?: string; + openerId?: string; + }; + } + | undefined; + const allowed = isAllowedTarget(params?.targetInfo); + const tid = params?.targetInfo?.targetId; + if (!allowed) { + console.log( + `[cdp #${connId}] ←dn (root) attachedToTarget tid=${shortTid(tid)} [DROPPED not in bound]`, + ); + return; + } + if (params?.sessionId) allowedSessionIds.add(params.sessionId); + if (params?.sessionId && tid) { + sessionIdToTargetId.set(params.sessionId, tid); + } + console.log( + `[cdp #${connId}] ←dn (root) attachedToTarget tid=${shortTid(tid)} sid=${shortSid(params?.sessionId)} (allowed=${allowedSessionIds.size})`, + ); + sendToClient({ + ...msg, + params: { + ...(params ?? {}), + targetInfo: rewriteTargetInfoType(params?.targetInfo), + }, + }); + return; + } + if (method === "Target.detachedFromTarget") { + const sid = (msg.params as { sessionId?: string } | undefined) + ?.sessionId; + if (!sid || !allowedSessionIds.has(sid)) return; + allowedSessionIds.delete(sid); + console.log( + `[cdp #${connId}] ←dn (root) detachedFromTarget sid=${shortSid(sid)} (allowed=${allowedSessionIds.size})`, + ); + sendToClient(msg); + return; + } + // Target.receivedMessageFromTarget carries a session-scoped + // payload on the browser-level socket (the non-flatten path). 
+ // Drop it when the inner sessionId or targetId is outside our + // scope so we don't leak another pane's CDP traffic. + if (method === "Target.receivedMessageFromTarget") { + const params = msg.params as + | { sessionId?: string; targetId?: string } + | undefined; + if (params?.sessionId && !allowedSessionIds.has(params.sessionId)) + return; + if (params?.targetId && !bound.has(params.targetId)) return; + sendToClient(msg); + return; + } + // Any other Target.* event that mentions a targetId / + // sessionId must also be scoped. + if (method.startsWith("Target.")) { + const tid = + targetIdOf(msg.params) ?? + (msg.params as { targetInfo?: { targetId?: string } } | undefined) + ?.targetInfo?.targetId; + const sid = (msg.params as { sessionId?: string } | undefined) + ?.sessionId; + if (tid && !bound.has(tid)) return; + if (sid && !allowedSessionIds.has(sid)) return; + sendToClient(msg); + return; + } + sendToClient(msg); + }); + + upstream.on("error", (err) => { + console.warn("[cdp-filter-proxy] browser upstream error", err); + closeBoth(); + }); + upstream.on("close", (code, reason) => { + console.log( + "[cdp-filter-proxy] upstream WS closed", + code, + reason?.toString?.() ?? "", + ); + closeBoth(); + }); + clientWs.on("error", (err) => { + console.warn("[cdp-filter-proxy] client WS error", err); + closeBoth(); + }); + clientWs.on("close", (code, reason) => { + console.log( + "[cdp-filter-proxy] client WS closed", + code, + reason?.toString?.() ?? 
"", + ); + closeBoth(); + }); + }); +} diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-gateway.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-gateway.ts new file mode 100644 index 00000000000..e88fd68f20b --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-gateway.ts @@ -0,0 +1,453 @@ +import { chmodSync, mkdirSync, writeFileSync } from "node:fs"; +import type { IncomingMessage, ServerResponse } from "node:http"; +import type { Socket } from "node:net"; +import { dirname, join } from "node:path"; +import type { Duplex } from "node:stream"; +import { type WebSocket, WebSocketServer } from "ws"; +import { bindingStore } from "../../../lib/trpc/routers/browser-automation/index"; +import { SUPERSET_HOME_DIR } from "../app-environment"; +import { browserManager } from "../browser/browser-manager"; +import { + type BoundContext, + browserWsIdFor, + fetchUpstreamJson, + proxyBrowserUpgrade, + proxyPageUpgrade, + sendJson, +} from "./cdp-filter-proxy"; +import { resolveCdpPort } from "./cdp-port"; +import { resolvePidToSession } from "./pane-resolver"; +import { resolvePeerPidFromRemotePort } from "./peer-pid"; +import { permissionStore } from "./permissions"; + +/** + * Single-port CDP gateway. + * + * Serves the endpoints external CDP MCPs expect (`/json/*`, + * `/devtools/browser/`, `/devtools/page/`) on the bridge port + * (47834) and resolves which LLM session (and therefore which bound + * pane) the caller belongs to *per connection* via a loopback peer-PID + * walk — so the registration URL stays constant across Superset / OS + * restarts, pane rebindings, and new terminal panes. + * + * Security: loopback-only. The peer-PID walk additionally requires the + * caller to descend from a live Superset terminal pane. + * + * These endpoints are unauthenticated because puppeteer composes + * `new URL("/json/version", browserURL)` and drops any path/query/ + * Authorization header from the base URL. 
Capability is instead the + * peer-PID tree-descendant check. + */ + +const wss = new WebSocketServer({ noServer: true }); + +const GLOBAL_BROWSER_USE_CONFIG_PATH = join( + SUPERSET_HOME_DIR, + "browser-use-mcp.json", +); + +export function getGlobalBrowserUseConfigPath(): string { + return GLOBAL_BROWSER_USE_CONFIG_PATH; +} + +export function ensureGlobalBrowserUseConfig(bridgePort: number): void { + const payload = { + browser_profile: { + "superset-gateway": { + id: "superset-gateway", + default: true, + cdp_url: `http://127.0.0.1:${bridgePort}`, + }, + }, + llm: {}, + agent: {}, + }; + try { + mkdirSync(dirname(GLOBAL_BROWSER_USE_CONFIG_PATH), { recursive: true }); + writeFileSync( + GLOBAL_BROWSER_USE_CONFIG_PATH, + JSON.stringify(payload, null, 2), + { mode: 0o600 }, + ); + try { + chmodSync(GLOBAL_BROWSER_USE_CONFIG_PATH, 0o600); + } catch { + /* best effort */ + } + } catch (error) { + console.warn( + "[cdp-gateway] failed to write global browser-use config:", + error, + ); + } +} + +/** + * Session resolution is keyed to the TCP socket (peer-PID lookup is + * expensive and the socket identifies a single external MCP process), + * but the paneId binding is intentionally NOT cached — a keep-alive + * HTTP connection crossing a pane rebind must pick up the new pane on + * the next request. 
+ */
+const socketSessions = new WeakMap<Socket, Promise<string | null>>();
+
+async function resolveSessionForSocket(socket: Socket): Promise<string | null> {
+ const cached = socketSessions.get(socket);
+ if (cached) return cached;
+ const attempt = (async () => {
+ if (
+ socket.remoteAddress !== "127.0.0.1" &&
+ socket.remoteAddress !== "::1" &&
+ socket.remoteAddress !== "::ffff:127.0.0.1"
+ ) {
+ console.log("[cdp-gateway] reject non-loopback", socket.remoteAddress);
+ return null;
+ }
+ const remotePort = socket.remotePort;
+ if (typeof remotePort !== "number") return null;
+ const peerPid = await resolvePeerPidFromRemotePort(remotePort, process.pid);
+ if (!peerPid) {
+ console.log(
+ "[cdp-gateway] peer-PID lookup failed for remotePort",
+ remotePort,
+ );
+ return null;
+ }
+ const session = await resolvePidToSession(peerPid);
+ if (!session?.sessionId) {
+ console.log(
+ "[cdp-gateway] peerPid",
+ peerPid,
+ "did not resolve to any Superset terminal pane",
+ );
+ return null;
+ }
+ const binding = bindingStore.getBySessionId(session.sessionId);
+ console.log(
+ "[cdp-gateway] resolved peerPid",
+ peerPid,
+ "→ session",
+ session.sessionId,
+ "binding=",
+ binding ? binding.paneId : "(none)",
+ );
+ return session.sessionId;
+ })();
+ // Only memoise positive resolutions. A negative result can be a
+ // transient race (claude still forking, lsof evicted) and we
+ // don't want to poison a long-lived keep-alive socket forever. 
+ const result = await attempt;
+ if (result) socketSessions.set(socket, Promise.resolve(result));
+ return result;
+}
+
+async function resolveForSocket(
+ socket: Socket,
+): Promise<{ paneId: string; sessionId: string } | null> {
+ const sessionId = await resolveSessionForSocket(socket);
+ if (!sessionId) return null;
+ const binding = bindingStore.getBySessionId(sessionId);
+ if (!binding) return null;
+ return { paneId: binding.paneId, sessionId };
+}
+
+/* ---------------------------------------------------------------- */
+/* M3: close active CDP connections for a session when its binding */
+/* changes, so external MCPs reconnect next tool call and */
+/* transparently pick up the new pane. */
+/* ---------------------------------------------------------------- */
+
+const sessionConnections = new Map<string, Set<WebSocket>>();
+
+function registerConnection(sessionId: string, ws: WebSocket): void {
+ let set = sessionConnections.get(sessionId);
+ if (!set) {
+ set = new Set();
+ sessionConnections.set(sessionId, set);
+ }
+ set.add(ws);
+ ws.on("close", () => {
+ set?.delete(ws);
+ if (set && set.size === 0) sessionConnections.delete(sessionId);
+ });
+}
+
+function closeConnectionsForSession(sessionId: string): void {
+ const set = sessionConnections.get(sessionId);
+ if (!set) return;
+ console.log(
+ "[cdp-gateway] M3 closing",
+ set.size,
+ "connections for session",
+ sessionId,
+ );
+ for (const ws of Array.from(set)) {
+ try {
+ ws.close(1000, "superset: binding changed, reconnect");
+ } catch {
+ /* ignore */
+ }
+ }
+ sessionConnections.delete(sessionId);
+}
+
+let bindingChangeWatcherInstalled = false;
+const lastBindingBySession = new Map();
+const paneTargetWatchersInstalled = new Set();
+
+function closeConnectionsForPane(paneId: string): void {
+ const sessions = bindingStore
+ .list()
+ .filter((b) => b.paneId === paneId)
+ .map((b) => b.sessionId);
+ if (sessions.length === 0) return;
+ console.log(
+ "[cdp-gateway] pane-target-set-changed → closing connections 
for", + sessions.length, + "session(s) bound to pane", + paneId, + ); + for (const sid of sessions) closeConnectionsForSession(sid); +} + +function ensurePaneTargetWatcher(paneId: string): void { + if (paneTargetWatchersInstalled.has(paneId)) return; + paneTargetWatchersInstalled.add(paneId); + browserManager.on(`pane-target-set-changed:${paneId}`, () => { + // Any shrink / grow / primary-swap of the pane's target set + // invalidates the per-connection allow-list snapshot held by + // existing proxy connections. Force them to close so the next + // tool call reconnects with a fresh bound set. + closeConnectionsForPane(paneId); + }); +} + +function closeAllConnections(reason: string): void { + const sessionIds = Array.from(sessionConnections.keys()); + if (sessionIds.length === 0) return; + console.log( + `[cdp-gateway] ${reason} → closing all connections across ${sessionIds.length} session(s)`, + ); + for (const sid of sessionIds) closeConnectionsForSession(sid); +} + +let permissionWatcherInstalled = false; +function installPermissionWatcher(): void { + if (permissionWatcherInstalled) return; + permissionWatcherInstalled = true; + permissionStore.on("activeChanged", (presetId) => { + closeAllConnections(`permission preset changed to ${presetId}`); + }); +} + +function installBindingChangeWatcher(): void { + if (bindingChangeWatcherInstalled) return; + bindingChangeWatcherInstalled = true; + installPermissionWatcher(); + for (const b of bindingStore.list()) { + lastBindingBySession.set(b.sessionId, b.paneId); + ensurePaneTargetWatcher(b.paneId); + } + bindingStore.onChange((list) => { + const next = new Map(); + for (const b of list) { + next.set(b.sessionId, b.paneId); + ensurePaneTargetWatcher(b.paneId); + } + // Session removed → close. + for (const [sid] of lastBindingBySession) { + if (!next.has(sid)) closeConnectionsForSession(sid); + } + // Pane changed → close so client reconnects with new binding. 
+ for (const [sid, paneId] of next) {
+ const prev = lastBindingBySession.get(sid);
+ if (prev && prev !== paneId) closeConnectionsForSession(sid);
+ }
+ lastBindingBySession.clear();
+ for (const [k, v] of next) lastBindingBySession.set(k, v);
+ });
+}
+
+function makeBoundContext(resolved: {
+ paneId: string;
+ sessionId: string;
+}): BoundContext | null {
+ const primary = browserManager.getCdpTargetId(resolved.paneId);
+ if (!primary) return null;
+ return {
+ paneId: resolved.paneId,
+ primaryTargetId: primary,
+ boundTargetIds: () => {
+ // M1/M2 bridge: ask browserManager for the pane's current
+ // target set. In single-tab mode this is a singleton; with
+ // multi-tab enabled it returns every tab. Falls back to the
+ // primary when no registry is available.
+ const all = browserManager.getPaneTargetIds?.(resolved.paneId);
+ if (all && all.size > 0) return all;
+ return new Set([primary]);
+ },
+ onClose: (ws) => {
+ registerConnection(resolved.sessionId, ws);
+ },
+ };
+}
+
+export function isCdpGatewayPath(pathname: string): boolean {
+ const p = pathname.replace(/\/$/, "") || "/";
+ return (
+ p === "/json" ||
+ p === "/json/list" ||
+ p === "/json/version" ||
+ p === "/json/protocol"
+ );
+}
+
+export function isCdpGatewayUpgradePath(pathname: string): boolean {
+ return (
+ /^\/devtools\/browser\/[^/]+$/.test(pathname) ||
+ /^\/devtools\/page\/[^/]+$/.test(pathname)
+ );
+}
+
+export async function handleCdpGatewayRequest(
+ req: IncomingMessage,
+ res: ServerResponse,
+): Promise<void> {
+ installBindingChangeWatcher();
+ const url = new URL(req.url ?? 
"/", "http://localhost");
+ const pathname = url.pathname.replace(/\/$/, "") || "/";
+ try {
+ const resolved = await resolveForSocket(req.socket as Socket);
+ if (!resolved) {
+ console.log(
+ "[cdp-gateway] 409 for",
+ pathname,
+ "| current bindings:",
+ bindingStore.list().map((b) => `${b.sessionId}→${b.paneId}`),
+ );
+ sendJson(res, 409, {
+ error:
+ "このLLMセッションにはブラウザペインが接続されていません。Supersetの「Connect」で対象ペインをアタッチしてください。",
+ });
+ return;
+ }
+ const primary = browserManager.getCdpTargetId(resolved.paneId);
+ if (!primary) {
+ sendJson(res, 503, {
+ error:
+ "バインド済みペインのCDPターゲット準備がまだ完了していません。少し待って再試行してください。",
+ });
+ return;
+ }
+
+ const host = req.headers.host ?? "127.0.0.1";
+ if (pathname === "/json/version") {
+ const body = (await fetchUpstreamJson("/json/version")) as Record<
+ string,
+ unknown
+ >;
+ const { webSocketDebuggerUrl: _drop, ...safe } = body;
+ void _drop;
+ sendJson(res, 200, {
+ ...safe,
+ webSocketDebuggerUrl: `ws://${host}/devtools/browser/${browserWsIdFor(resolved.sessionId)}`,
+ });
+ return;
+ }
+ if (pathname === "/json/protocol") {
+ const body = await fetchUpstreamJson("/json/protocol");
+ sendJson(res, 200, body);
+ return;
+ }
+ // /json or /json/list
+ const raw = (await fetchUpstreamJson("/json/list")) as Array<
+ Record<string, unknown>
+ >;
+ const boundSet =
+ browserManager.getPaneTargetIds?.(resolved.paneId) ?? new Set([primary]);
+ const out = raw
+ .filter((t) => {
+ const id = (t as { id?: string }).id;
+ return typeof id === "string" && boundSet.has(id);
+ })
+ .map((t) => {
+ const id = (t as { id?: string }).id ?? primary;
+ // Electron exposes its <webview> / BrowserView tags as
+ // `type: "webview"` in /json/list. puppeteer-core's
+ // `browser.pages()` only counts targets whose type is
+ // `page`, so leaving "webview" through would make
+ // chrome-devtools-mcp's `list_pages` / `evaluate_script`
+ // return empty even when the bound pane is alive. Rewrite
+ // the type on the way out. 
+ const type = (t as { type?: string }).type;
+ return {
+ ...t,
+ type: type === "webview" ? "page" : type,
+ webSocketDebuggerUrl: `ws://${host}/devtools/page/${id}`,
+ devtoolsFrontendUrl: `http://${host}/devtools/page/${id}`,
+ };
+ });
+ sendJson(res, 200, out);
+ } catch (error) {
+ console.error("[cdp-gateway] request error:", error);
+ sendJson(res, 502, {
+ error: error instanceof Error ? error.message : String(error),
+ });
+ }
+}
+
+export async function handleCdpGatewayUpgrade(
+ req: IncomingMessage,
+ socket: Duplex,
+ head: Buffer,
+): Promise<void> {
+ installBindingChangeWatcher();
+ const pathname = new URL(req.url ?? "/", "http://localhost").pathname;
+ const isBrowserPath = /^\/devtools\/browser\/[^/]+$/.test(pathname);
+ const isPagePath = /^\/devtools\/page\/[^/]+$/.test(pathname);
+ if (!isBrowserPath && !isPagePath) {
+ socket.destroy();
+ return;
+ }
+ const s = socket as unknown as Socket;
+ if (
+ s.remoteAddress !== "127.0.0.1" &&
+ s.remoteAddress !== "::1" &&
+ s.remoteAddress !== "::ffff:127.0.0.1"
+ ) {
+ socket.destroy();
+ return;
+ }
+ const resolved = await resolveForSocket(s);
+ if (!resolved) {
+ socket.destroy();
+ return;
+ }
+ const ctx = makeBoundContext(resolved);
+ if (!ctx) {
+ socket.destroy();
+ return;
+ }
+ const port = await resolveCdpPort();
+ if (!port) {
+ socket.destroy();
+ return;
+ }
+ if (isBrowserPath) {
+ const expected = `/devtools/browser/${browserWsIdFor(resolved.sessionId)}`;
+ if (pathname !== expected) {
+ socket.destroy();
+ return;
+ }
+ void proxyBrowserUpgrade(req, socket, head, wss, port, ctx);
+ return;
+ }
+ // Page-level upgrade: ensure the requested target is in the bound set. 
+ const m = pathname.match(/^\/devtools\/page\/([^/]+)$/);
+ const tid = m?.[1];
+ if (!tid || !ctx.boundTargetIds().has(tid)) {
+ socket.destroy();
+ return;
+ }
+ void proxyPageUpgrade(req, socket, head, wss, port, tid);
+}
diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-port.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-port.ts
new file mode 100644
index 00000000000..6777c8d971e
--- /dev/null
+++ b/apps/desktop/src/main/lib/browser-mcp-bridge/cdp-port.ts
@@ -0,0 +1,55 @@
+import { existsSync, readFileSync } from "node:fs";
+import { join } from "node:path";
+import { app } from "electron";
+
+/**
+ * Resolve the port Chromium chose for `--remote-debugging-port=0`.
+ *
+ * When Chromium opens the CDP server it writes a file called
+ * `DevToolsActivePort` in the user data directory. Its first line is
+ * the assigned port number. We read that file with a small retry so
+ * the resolution works even if callers query before Chromium has
+ * finished writing it (the file appears after `app.whenReady()` but
+ * right around the same time startBrowserMcpBridge fires).
+ */
+const DEVTOOLS_FILE = "DevToolsActivePort";
+
+async function sleep(ms: number): Promise<void> {
+ return new Promise<void>((resolve) => {
+ setTimeout(resolve, ms);
+ });
+}
+
+function readOnce(): number | null {
+ const path = join(app.getPath("userData"), DEVTOOLS_FILE);
+ if (!existsSync(path)) return null;
+ try {
+ const contents = readFileSync(path, "utf8").trim();
+ if (!contents) return null;
+ const firstLine = contents.split(/\r?\n/, 1)[0]?.trim();
+ if (!firstLine) return null;
+ const port = Number.parseInt(firstLine, 10);
+ return Number.isFinite(port) && port > 0 ? port : null;
+ } catch {
+ return null;
+ }
+}
+
+export async function resolveCdpPort(
+ timeoutMs = 5_000,
+): Promise<number | null> {
+ // When DESKTOP_AUTOMATION_PORT is explicitly set, trust it — Chromium
+ // is using that exact port, no file lookup needed. 
+ const envPort = process.env.DESKTOP_AUTOMATION_PORT; + if (envPort) { + const parsed = Number.parseInt(envPort, 10); + if (Number.isFinite(parsed) && parsed > 0) return parsed; + } + const deadline = Date.now() + timeoutMs; + let port = readOnce(); + while (!port && Date.now() < deadline) { + await sleep(100); + port = readOnce(); + } + return port; +} diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/mcp-installer.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/mcp-installer.ts new file mode 100644 index 00000000000..7393d44d62b --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/mcp-installer.ts @@ -0,0 +1,263 @@ +import { execFile as execFileCb } from "node:child_process"; +import { readFileSync } from "node:fs"; +import { homedir } from "node:os"; +import { join } from "node:path"; +import { promisify } from "node:util"; +import { getProcessEnvWithShellPath } from "lib/trpc/routers/workspaces/utils/shell-env"; + +function unescapeTomlBasicString(raw: string): string { + return raw.replace( + /\\(["\\bfnrt]|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})/g, + (_, esc) => { + switch (esc) { + case "\\": + return "\\"; + case '"': + return '"'; + case "b": + return "\b"; + case "f": + return "\f"; + case "n": + return "\n"; + case "r": + return "\r"; + case "t": + return "\t"; + default: { + const hex = esc.slice(1); + const code = Number.parseInt(hex, 16); + return Number.isFinite(code) ? String.fromCodePoint(code) : ""; + } + } + }, + ); +} + +function extractTomlStrings(line: string | undefined): string[] { + if (!line) return []; + const out: string[] = []; + const re = /"((?:\\.|[^"\\])*)"|'([^']*)'/g; + for (let m = re.exec(line); m !== null; m = re.exec(line)) { + if (m[1] !== undefined) out.push(unescapeTomlBasicString(m[1])); + else if (m[2] !== undefined) out.push(m[2]); + } + return out; +} + +function parseFirstTomlString(line: string | undefined): string { + return extractTomlStrings(line)[0] ?? 
""; +} + +const execFileRaw = promisify(execFileCb); + +/** + * Run a CLI (`claude` / `codex`) with the login-shell PATH merged in so + * macOS GUI launches (Dock / Finder) can still find tools installed + * under $HOME/.local/bin, homebrew, nvm, etc. that a non-shell Electron + * launch misses. + */ +async function execFile( + command: string, + args: readonly string[], +): Promise<{ stdout: string; stderr: string }> { + return execFileRaw(command, [...args], { + env: await getProcessEnvWithShellPath(), + }); +} + +const SERVER_NAME = "superset-browser"; + +export type McpTarget = "claude" | "codex"; + +export interface InstallTargetState { + /** CLI binary found on PATH. */ + cliFound: boolean; + /** + * `superset-browser` is already registered. `matchesExpected` is true + * when the registered command + args match what the Superset app would + * install today — if false, re-installing the entry will correct a + * stale legacy registration (e.g. the old `desktop-mcp` bin name). + */ + installed: boolean; + matchesExpected: boolean; + /** Raw command string currently registered, for display only. */ + currentCommand: string | null; +} + +export interface InstallState { + claude: InstallTargetState; + codex: InstallTargetState; +} + +interface ExpectedCommand { + command: string; + args: string[]; +} + +async function which(binary: string): Promise { + try { + const { stdout } = await execFile( + process.platform === "win32" ? 
"where" : "which", + [binary], + ); + return stdout.trim().length > 0; + } catch { + return false; + } +} + +function commandsEqual( + a: { command: string; args: string[] }, + b: ExpectedCommand, +): boolean { + if (a.command !== b.command) return false; + if (a.args.length !== b.args.length) return false; + for (let i = 0; i < a.args.length; i++) { + if (a.args[i] !== b.args[i]) return false; + } + return true; +} + +async function probeClaude( + expected: ExpectedCommand, +): Promise { + const cliFound = await which("claude"); + if (!cliFound) { + return { + cliFound: false, + installed: false, + matchesExpected: false, + currentCommand: null, + }; + } + try { + const { stdout } = await execFile("claude", ["mcp", "get", SERVER_NAME]); + const lines = stdout.split("\n"); + const commandLine = lines.find((l) => /^\s*command:/i.test(l)); + const argsLine = lines.find((l) => /^\s*args:/i.test(l)); + const command = commandLine?.split(":").slice(1).join(":").trim() ?? ""; + const argsRaw = argsLine?.split(":").slice(1).join(":").trim() ?? ""; + const args = argsRaw.length > 0 ? argsRaw.split(/\s+/) : []; + return { + cliFound: true, + installed: true, + matchesExpected: commandsEqual({ command, args }, expected), + currentCommand: [command, ...args].filter(Boolean).join(" "), + }; + } catch { + return { + cliFound: true, + installed: false, + matchesExpected: false, + currentCommand: null, + }; + } +} + +function probeCodex(expected: ExpectedCommand): InstallTargetState { + const cliFound = true; // Probed separately when install is requested. 
+ const configPath = join(homedir(), ".codex", "config.toml"); + let contents: string; + try { + contents = readFileSync(configPath, "utf8"); + } catch { + return { + cliFound, + installed: false, + matchesExpected: false, + currentCommand: null, + }; + } + const nameRe = new RegExp( + String.raw`(^|\n)\[\s*mcp_servers\.(?:${SERVER_NAME}|["']${SERVER_NAME}["'])\s*\]\s*\n([\s\S]*?)(?=\n\[|$)`, + ); + const match = contents.match(nameRe); + if (!match) { + return { + cliFound, + installed: false, + matchesExpected: false, + currentCommand: null, + }; + } + const body = match[2] + .split("\n") + .map((l) => l.trim()) + .filter((l) => l.length > 0 && !l.startsWith("#")); + const commandLine = body.find((l) => /^command\s*=/.test(l)); + const argsLine = body.find((l) => /^args\s*=/.test(l)); + const command = parseFirstTomlString(commandLine); + const args = extractTomlStrings(argsLine); + return { + cliFound, + installed: true, + matchesExpected: commandsEqual({ command, args }, expected), + currentCommand: [command, ...args].filter(Boolean).join(" "), + }; +} + +export async function getInstallState( + expected: ExpectedCommand, +): Promise { + const [claude, codexCliFound] = await Promise.all([ + probeClaude(expected), + which("codex"), + ]); + const codexBase = probeCodex(expected); + return { + claude, + codex: { ...codexBase, cliFound: codexCliFound }, + }; +} + +async function installForClaude(expected: ExpectedCommand): Promise { + // `claude mcp add` fails if the name already exists; remove first so + // the call is idempotent and also corrects stale command paths. 
+  await execFile("claude", ["mcp", "remove", SERVER_NAME]).catch(() => {});
+  await execFile("claude", [
+    "mcp",
+    "add",
+    SERVER_NAME,
+    "-s",
+    "user",
+    "--",
+    expected.command,
+    ...expected.args,
+  ]);
+}
+
+async function installForCodex(expected: ExpectedCommand): Promise<void> {
+  await execFile("codex", ["mcp", "remove", SERVER_NAME]).catch(() => {});
+  await execFile("codex", [
+    "mcp",
+    "add",
+    SERVER_NAME,
+    "--",
+    expected.command,
+    ...expected.args,
+  ]);
+}
+
+export async function installMcp(
+  targets: readonly McpTarget[],
+  expected: ExpectedCommand,
+): Promise<Record<McpTarget, { ok: boolean; error: string | null }>> {
+  const results: Record<McpTarget, { ok: boolean; error: string | null }> = {
+    claude: { ok: false, error: null },
+    codex: { ok: false, error: null },
+  };
+  for (const target of targets) {
+    try {
+      if (target === "claude") await installForClaude(expected);
+      else await installForCodex(expected);
+      results[target] = { ok: true, error: null };
+    } catch (error) {
+      results[target] = {
+        ok: false,
+        error: error instanceof Error ? error.message : String(error),
+      };
+    }
+  }
+  return results;
+}
diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/pane-resolver.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/pane-resolver.ts
new file mode 100644
index 00000000000..0b72949c9e4
--- /dev/null
+++ b/apps/desktop/src/main/lib/browser-mcp-bridge/pane-resolver.ts
@@ -0,0 +1,180 @@
+import {
+  getProcessCommand,
+  getProcessName,
+  getProcessTree,
+} from "main/lib/terminal/port-scanner";
+import { getTerminalHostClient } from "main/lib/terminal-host/client";
+import { bindingStore } from "../../../lib/trpc/routers/browser-automation/index";
+
+/**
+ * PID-based automatic mapping from an MCP process's PPID (the Claude /
+ * Codex CLI that spawned the MCP) to a Superset session and therefore
+ * a bound browser pane.
+ *
+ * Resolution today walks every live terminal pane's PTY process tree
+ * for the PPID. 
TODO-Agent worker resolution will be added in a + * follow-up that pipes the worker PID through the daemon-bridge IPC + * (the daemon is a separate process, so an in-process registry cannot + * reach this main-process code). + * + * Positive resolutions are cached briefly so we do not re-walk process + * trees on every tool call. Negative resolutions are NOT cached — a + * miss can be a transient listSessions failure or a brief race. + */ +export interface ResolvedSession { + sessionId: string; + kind: "todo-agent" | "terminal"; + paneId?: string; +} + +const CACHE_TTL_MS = 5_000; + +interface CacheEntry { + resolved: ResolvedSession; + at: number; +} + +const cache = new Map(); + +async function resolveFromTerminalPanes( + ppid: number, +): Promise { + let sessions: Awaited< + ReturnType["listSessions"]> + >["sessions"]; + try { + const client = getTerminalHostClient(); + const res = await client.listSessions(); + sessions = res.sessions; + } catch { + return null; + } + for (const s of sessions) { + if (!s.isAlive || typeof s.pid !== "number") continue; + // A single pane's process tree / name lookup can race with + // exit; swallow the per-pane failure and try the next one. + try { + const tree = await getProcessTree(s.pid); + if (!tree.includes(ppid)) continue; + // Accept the pane if our parent looks like claude / codex + // (either directly, or as a node-wrapped CLI we can spot by + // argv). comm alone is not enough because node CLIs commonly + // appear as comm=node with the real entrypoint in argv. 
+ const [name, command] = await Promise.all([ + getProcessName(ppid).catch(() => ""), + getProcessCommand(ppid).catch(() => ""), + ]); + const lname = name.toLowerCase(); + const looksAgent = + lname === "claude" || + lname === "codex" || + /\b(claude|codex)(?:\.js)?\b/.test(command); + if (looksAgent || lname.includes("node")) { + return { + sessionId: `terminal:${s.paneId}`, + kind: "terminal", + paneId: s.paneId, + }; + } + } catch { + // Keep scanning — other panes may still match. + } + } + return null; +} + +export async function resolvePpidToSession( + ppid: number, +): Promise { + const cached = cache.get(ppid); + if (cached && Date.now() - cached.at < CACHE_TTL_MS) { + return cached.resolved; + } + const resolved = await resolveFromTerminalPanes(ppid); + if (resolved) cache.set(ppid, { resolved, at: Date.now() }); + return resolved; +} + +/** + * Like resolvePpidToSession but walks by process-tree *inclusion* + * rather than by PPID identity, and skips the claude/codex name check. + * + * Use this when the caller holds the PID of an arbitrary descendant of + * a Superset terminal pane (e.g. a loopback peer PID obtained from + * lsof). The MCP's immediate parent is often `npx`, `uvx`, or a node + * wrapper rather than Claude / Codex itself, so the + * looksAgent heuristic would reject the caller. The descendant has + * to have been launched from inside a Superset terminal pane anyway — + * that is itself the security boundary, and tree-inclusion is + * sufficient evidence of that. 
+ */ +export async function resolvePidToSession( + pid: number, +): Promise { + if (!Number.isFinite(pid) || pid <= 0) return null; + const cached = cache.get(pid); + if (cached && Date.now() - cached.at < CACHE_TTL_MS) { + console.log("[pane-resolver] cache hit for pid", pid, "→", cached.resolved); + return cached.resolved; + } + let sessions: Awaited< + ReturnType["listSessions"]> + >["sessions"]; + try { + const client = getTerminalHostClient(); + const res = await client.listSessions(); + sessions = res.sessions; + } catch (err) { + console.log("[pane-resolver] listSessions failed:", err); + return null; + } + console.log( + "[pane-resolver] resolvePidToSession pid", + pid, + "sessions:", + sessions.map((s) => ({ + paneId: s.paneId, + pid: s.pid, + isAlive: s.isAlive, + })), + ); + for (const s of sessions) { + if (!s.isAlive || typeof s.pid !== "number") continue; + try { + const tree = await getProcessTree(s.pid); + console.log( + "[pane-resolver] pane", + s.paneId, + "pty pid", + s.pid, + "tree size", + tree.length, + "includes target?", + tree.includes(pid), + "tree sample:", + tree.slice(0, 10), + ); + if (!tree.includes(pid)) continue; + const resolved: ResolvedSession = { + sessionId: `terminal:${s.paneId}`, + kind: "terminal", + paneId: s.paneId, + }; + cache.set(pid, { resolved, at: Date.now() }); + return resolved; + } catch (err) { + console.log( + "[pane-resolver] getProcessTree failed for pane", + s.paneId, + err, + ); + } + } + console.log("[pane-resolver] pid", pid, "not found in any pane tree"); + return null; +} + +export function getBoundPaneForSession(sessionId: string): string | null { + const binding = bindingStore.getBySessionId(sessionId); + return binding?.paneId ?? 
null; +} diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/peer-pid.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/peer-pid.ts new file mode 100644 index 00000000000..0f26eab084f --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/peer-pid.ts @@ -0,0 +1,95 @@ +import { exec } from "node:child_process"; +import { platform } from "node:os"; +import { promisify } from "node:util"; + +const execAsync = promisify(exec); + +/** + * Resolve the PID of the remote end of a loopback TCP connection on + * macOS / Linux. + * + * macOS does not expose a TCP-equivalent of LOCAL_PEERPID, so we fall + * back to `lsof` and filter by the remote side's ephemeral port. For a + * loopback connection `lsof` returns two entries (server side, client + * side); we identify the peer as the process whose *local* end matches + * `remotePort`, not `ownPid`. + * + * The remote port is a 16-bit ephemeral chosen by the kernel for each + * outbound connection, so collisions between concurrent connections + * are effectively impossible in practice on a single host. We still + * defend against race / reuse by rejecting results that could point + * back to our own process. + */ +export async function resolvePeerPidFromRemotePort( + remotePort: number, + ownPid: number, +): Promise { + if (!Number.isInteger(remotePort) || remotePort < 1 || remotePort > 65_535) { + return null; + } + const plat = platform(); + if (plat !== "darwin" && plat !== "linux") { + // Windows / other platforms: lsof is unavailable. The gateway + // is not supported on these platforms; the caller will surface + // the same 409 as "no binding" so external MCPs receive a + // clean error instead of hanging. Windows support is tracked + // separately and will plug into this resolver via a + // platform-specific implementation (netstat / GetTcpTable2). 
+ return null; + } + try { + const cmd = `lsof -nP -iTCP:${remotePort} -sTCP:ESTABLISHED 2>/dev/null || true`; + const { stdout } = await execAsync(cmd, { + timeout: 3_000, + maxBuffer: 1024 * 1024, + }); + const text = stdout.trim(); + console.log( + "[peer-pid] lsof for remotePort", + remotePort, + "ownPid", + ownPid, + "\n", + text || "(empty)", + ); + if (!text) return null; + const lines = text.split("\n").slice(1); // skip header + for (const line of lines) { + const cols = line.trim().split(/\s+/); + if (cols.length < 9) continue; + const pid = Number.parseInt(cols[1] ?? "", 10); + if (!Number.isFinite(pid) || pid <= 0) continue; + if (pid === ownPid) continue; + const name = cols.slice(8).join(" "); + const match = name.match( + /^(?:\[::1\]|127\.0\.0\.1):(\d+)->(?:\[::1\]|127\.0\.0\.1):(\d+)/, + ); + if (!match) { + console.log( + "[peer-pid] skip entry without loopback arrow: pid", + pid, + "name", + name, + ); + continue; + } + const localPort = Number.parseInt(match[1] ?? "", 10); + console.log( + "[peer-pid] candidate pid", + pid, + "localPort", + localPort, + "remotePort target", + remotePort, + ); + if (localPort === remotePort) return pid; + } + console.log( + "[peer-pid] no entry matched localPort === remotePort", + remotePort, + ); + return null; + } catch { + return null; + } +} diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/permissions.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/permissions.ts new file mode 100644 index 00000000000..3eef469eadb --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/permissions.ts @@ -0,0 +1,501 @@ +import { EventEmitter } from "node:events"; +import { + chmodSync, + existsSync, + mkdirSync, + readFileSync, + writeFileSync, +} from "node:fs"; +import { dirname, join } from "node:path"; +import { SUPERSET_HOME_DIR } from "../app-environment"; + +/** + * CDP permission presets. 
+ * + * The filter proxy's deny list used to be fully hardcoded, which was + * too rigid: it broke legitimate frontend-dev workflows (can't read + * cookies from a local app under MCP automation) while still not + * covering every edge case. + * + * The new model: a small set of capability toggles, grouped into + * user-named presets. The user switches the active preset from the + * Connect modal; the filter re-reads toggles on every request and + * active proxies are force-closed so the MCP reconnects under the + * fresh scope. + * + * Some CDP methods are ALWAYS denied (Browser.close, Page.close, + * Target.createBrowserContext, …). These are not part of the toggle + * set because honouring them would trash the user-visible pane or + * escape the pane sandbox entirely; they have no legitimate use in + * automation against a Superset-managed pane. + */ + +export type PermissionToggleKey = + | "cookieRead" + | "cookieWrite" + | "storageWrite" + | "permissions" + | "privilegedSchemes" + | "downloadOverride" + | "uaOverride" + | "debugger" + | "networkIntercept"; + +export const PERMISSION_TOGGLE_KEYS: PermissionToggleKey[] = [ + "cookieRead", + "cookieWrite", + "storageWrite", + "permissions", + "privilegedSchemes", + "downloadOverride", + "uaOverride", + "debugger", + "networkIntercept", +]; + +export interface PermissionToggleMeta { + key: PermissionToggleKey; + label: string; + description: string; + /** CDP methods this toggle controls (documentation only). 
*/ + methods: string[]; +} + +export const PERMISSION_TOGGLE_META: Record< + PermissionToggleKey, + PermissionToggleMeta +> = { + cookieRead: { + key: "cookieRead", + label: "Cookie 読み取り", + description: + "Cookie 一覧の取得を許可します。フロント開発で認証 Cookie を確認したいときに有効化。", + methods: [ + "Network.getCookies", + "Network.getAllCookies", + "Storage.getCookies", + ], + }, + cookieWrite: { + key: "cookieWrite", + label: "Cookie 書き込み / 削除", + description: + "Cookie の設定・削除を許可します。MCP でセッションを差し替えたい場合のみ。", + methods: [ + "Network.setCookie", + "Network.setCookies", + "Network.clearBrowserCookies", + "Storage.setCookie", + "Storage.setCookies", + "Storage.clearCookies", + ], + }, + storageWrite: { + key: "storageWrite", + label: "Storage 変更", + description: + "localStorage / IndexedDB / origin-scoped storage の書き換え・削除を許可します。", + methods: [ + "Storage.clearDataForOrigin", + "Storage.clearDataForStorageKey", + "DOMStorage.clear", + "DOMStorage.setDOMStorageItem", + "DOMStorage.removeDOMStorageItem", + "DOMStorage.getDOMStorageItems", + ], + }, + permissions: { + key: "permissions", + label: "ブラウザ権限付与", + description: + "通知 / 位置情報 / カメラ等のブラウザ権限を MCP から操作することを許可します。全ペイン共有なので要注意。", + methods: [ + "Browser.grantPermissions", + "Browser.resetPermissions", + "Browser.setPermission", + ], + }, + privilegedSchemes: { + key: "privilegedSchemes", + label: "特権スキーム navigate", + description: + "file:// / chrome:// / devtools:// / javascript: への createTarget を許可します。通常は http(s) のみ。", + methods: ["Target.createTarget (url scheme)"], + }, + downloadOverride: { + key: "downloadOverride", + label: "ダウンロード先上書き", + description: + "Browser.setDownloadBehavior で任意パスへのダウンロードを許可します。任意パス書き込みの起点になるので注意。", + methods: ["Browser.setDownloadBehavior"], + }, + uaOverride: { + key: "uaOverride", + label: "User-Agent 上書き", + description: + "Network.setUserAgentOverride を許可。partition-wide なので他ペインにも影響します。", + methods: ["Network.setUserAgentOverride"], + }, + debugger: { + key: "debugger", + label: "Debugger ドメイン", + 
description: + "JS デバッガー操作を許可。Runtime.evaluate と同等の実行権限を持ちます。", + methods: ["Debugger.*"], + }, + networkIntercept: { + key: "networkIntercept", + label: "Fetch / Network 書き換え", + description: + "Fetch.enable 系でリクエスト/レスポンスの MITM 的改ざんを許可します。", + methods: [ + "Fetch.enable", + "Fetch.continueRequest", + "Fetch.fulfillRequest", + "Fetch.failRequest", + ], + }, +}; + +export type PermissionToggles = Partial>; + +export interface PermissionPreset { + id: string; + name: string; + /** Built-in presets cannot be renamed or deleted. */ + builtin?: boolean; + toggles: PermissionToggles; +} + +export interface PermissionConfig { + presets: PermissionPreset[]; + activePresetId: string; +} + +const BUILTIN_SECURE: PermissionPreset = { + id: "builtin-secure", + name: "Secure (default)", + builtin: true, + toggles: { + cookieRead: false, + cookieWrite: false, + storageWrite: false, + permissions: false, + privilegedSchemes: false, + downloadOverride: false, + uaOverride: false, + debugger: false, + networkIntercept: false, + }, +}; + +const BUILTIN_FRONTEND_DEV: PermissionPreset = { + id: "builtin-frontend-dev", + name: "Frontend Dev", + builtin: true, + toggles: { + cookieRead: true, + cookieWrite: false, + storageWrite: true, + permissions: false, + privilegedSchemes: false, + downloadOverride: false, + uaOverride: true, + debugger: true, + networkIntercept: true, + }, +}; + +const BUILTIN_PERMISSIVE: PermissionPreset = { + id: "builtin-permissive", + name: "Permissive", + builtin: true, + toggles: { + cookieRead: true, + cookieWrite: true, + storageWrite: true, + permissions: true, + privilegedSchemes: true, + downloadOverride: true, + uaOverride: true, + debugger: true, + networkIntercept: true, + }, +}; + +export const BUILTIN_PRESETS: PermissionPreset[] = [ + BUILTIN_SECURE, + BUILTIN_FRONTEND_DEV, + BUILTIN_PERMISSIVE, +]; + +const CONFIG_PATH = join(SUPERSET_HOME_DIR, "browser-mcp-permissions.json"); + +interface StoreEvents { + change: [config: PermissionConfig]; + 
activeChanged: [presetId: string]; +} + +class PermissionStore extends EventEmitter { + private config: PermissionConfig = { + presets: BUILTIN_PRESETS.map((p) => ({ + ...p, + toggles: { ...p.toggles }, + })), + activePresetId: BUILTIN_SECURE.id, + }; + + constructor() { + super(); + this.load(); + } + + private load(): void { + if (!existsSync(CONFIG_PATH)) return; + try { + const raw = readFileSync(CONFIG_PATH, "utf-8"); + const parsed = JSON.parse(raw) as Partial; + const userPresets = Array.isArray(parsed.presets) + ? parsed.presets.filter( + (p): p is PermissionPreset => + !!p && typeof p.id === "string" && typeof p.name === "string", + ) + : []; + // Merge builtin + user. Builtins always come first and + // override any stored copy (so we can update defaults + // without the user's file sticking on stale values). + const byId = new Map(); + for (const p of BUILTIN_PRESETS) { + byId.set(p.id, { ...p, toggles: { ...p.toggles } }); + } + for (const p of userPresets) { + if (byId.has(p.id) && byId.get(p.id)?.builtin) continue; + byId.set(p.id, { + ...p, + builtin: false, + toggles: { ...p.toggles }, + }); + } + const active = + typeof parsed.activePresetId === "string" && + byId.has(parsed.activePresetId) + ? 
parsed.activePresetId + : BUILTIN_SECURE.id; + this.config = { + presets: Array.from(byId.values()), + activePresetId: active, + }; + } catch (error) { + console.warn("[permissions] failed to load config:", error); + } + } + + private persist(): void { + try { + mkdirSync(dirname(CONFIG_PATH), { recursive: true }); + writeFileSync(CONFIG_PATH, JSON.stringify(this.config, null, 2), { + mode: 0o600, + }); + try { + chmodSync(CONFIG_PATH, 0o600); + } catch { + /* best effort */ + } + } catch (error) { + console.warn("[permissions] failed to persist config:", error); + } + } + + getConfig(): PermissionConfig { + return { + presets: this.config.presets.map((p) => ({ + ...p, + toggles: { ...p.toggles }, + })), + activePresetId: this.config.activePresetId, + }; + } + + getActive(): PermissionPreset { + const found = this.config.presets.find( + (p) => p.id === this.config.activePresetId, + ); + return found ?? BUILTIN_SECURE; + } + + getActiveToggles(): PermissionToggles { + return this.getActive().toggles; + } + + setActive(presetId: string): void { + if (!this.config.presets.some((p) => p.id === presetId)) { + throw new Error(`Unknown preset id: ${presetId}`); + } + if (this.config.activePresetId === presetId) return; + this.config.activePresetId = presetId; + this.persist(); + this.emit("activeChanged", presetId); + this.emit("change", this.getConfig()); + } + + savePreset(input: { + id?: string; + name: string; + toggles: PermissionToggles; + }): PermissionPreset { + const id = input.id ?? `user-${Date.now().toString(36)}`; + const existing = this.config.presets.find((p) => p.id === id); + if (existing?.builtin) { + throw new Error(`Cannot modify built-in preset: ${id}`); + } + const next: PermissionPreset = { + id, + name: input.name, + builtin: false, + toggles: { ...input.toggles }, + }; + if (existing) { + this.config.presets = this.config.presets.map((p) => + p.id === id ? 
next : p, + ); + } else { + this.config.presets = [...this.config.presets, next]; + } + this.persist(); + this.emit("change", this.getConfig()); + // If the edited preset is the active one, notify filter to + // re-close connections so the MCP picks up new toggles. + if (this.config.activePresetId === id) { + this.emit("activeChanged", id); + } + return next; + } + + deletePreset(id: string): void { + const existing = this.config.presets.find((p) => p.id === id); + if (!existing) return; + if (existing.builtin) { + throw new Error(`Cannot delete built-in preset: ${id}`); + } + this.config.presets = this.config.presets.filter((p) => p.id !== id); + if (this.config.activePresetId === id) { + this.config.activePresetId = BUILTIN_SECURE.id; + this.emit("activeChanged", this.config.activePresetId); + } + this.persist(); + this.emit("change", this.getConfig()); + } +} + +export const permissionStore = new PermissionStore(); + +/** + * Classify a CDP method into (a) always-denied, (b) toggle-gated, + * or (c) always-allowed. + * + * The filter proxy calls this before forwarding any message and + * consults the current active preset's toggles. Returning + * {allowed:false} causes the filter to reply with -32000. + */ +export interface PermissionCheckResult { + allowed: boolean; + reason?: string; + /** + * When false, indicates the method is gated by a toggle that is + * currently OFF. UI surfaces this so the user can flip the + * relevant preset toggle. + */ + togglesKey?: PermissionToggleKey; +} + +const ALWAYS_DENIED = new Set([ + "Target.createBrowserContext", + "Target.disposeBrowserContext", + // Target.getBrowserContexts is READ-ONLY and required by + // puppeteer.connect() bootstrap — NOT denied. 
+ "Target.setRemoteLocations", + "Target.exposeDevToolsProtocol", + "Browser.close", + "Browser.crash", + "Browser.crashGpuProcess", + "Page.setWebLifecycleState", + "Page.close", +]); + +const TOGGLE_BY_METHOD: Record = { + // cookieRead + "Network.getCookies": "cookieRead", + "Network.getAllCookies": "cookieRead", + "Storage.getCookies": "cookieRead", + // cookieWrite + "Network.setCookie": "cookieWrite", + "Network.setCookies": "cookieWrite", + "Network.clearBrowserCookies": "cookieWrite", + "Storage.setCookie": "cookieWrite", + "Storage.setCookies": "cookieWrite", + "Storage.clearCookies": "cookieWrite", + // storageWrite + "Storage.clearDataForOrigin": "storageWrite", + "Storage.clearDataForStorageKey": "storageWrite", + "DOMStorage.clear": "storageWrite", + "DOMStorage.setDOMStorageItem": "storageWrite", + "DOMStorage.removeDOMStorageItem": "storageWrite", + "DOMStorage.getDOMStorageItems": "storageWrite", + // permissions + "Browser.grantPermissions": "permissions", + "Browser.resetPermissions": "permissions", + "Browser.setPermission": "permissions", + // downloadOverride + "Browser.setDownloadBehavior": "downloadOverride", + // uaOverride + "Network.setUserAgentOverride": "uaOverride", + // networkIntercept + "Fetch.enable": "networkIntercept", + "Fetch.continueRequest": "networkIntercept", + "Fetch.fulfillRequest": "networkIntercept", + "Fetch.failRequest": "networkIntercept", + "Fetch.continueResponse": "networkIntercept", + "Fetch.continueWithAuth": "networkIntercept", +}; + +export function checkMethodPermitted( + method: string, + toggles: PermissionToggles, +): PermissionCheckResult { + if (ALWAYS_DENIED.has(method)) { + return { + allowed: false, + reason: `${method} is always denied by the Superset CDP filter (pane lifecycle / scope escape).`, + }; + } + if (method.startsWith("Debugger.")) { + if (!toggles.debugger) { + return { + allowed: false, + reason: `${method} requires the Debugger permission toggle.`, + togglesKey: "debugger", + }; + } + 
return { allowed: true }; + } + const tog = TOGGLE_BY_METHOD[method]; + if (!tog) return { allowed: true }; + if (!toggles[tog]) { + return { + allowed: false, + reason: `${method} requires the "${PERMISSION_TOGGLE_META[tog].label}" permission toggle.`, + togglesKey: tog, + }; + } + return { allowed: true }; +} + +/** + * Scheme check for Target.createTarget url param. Returns true when + * the current toggles allow privileged schemes (file:, chrome:, + * devtools:, javascript:, data:). http(s) and about:blank are always + * allowed. + */ +export function isPrivilegedSchemeAllowed(toggles: PermissionToggles): boolean { + return toggles.privilegedSchemes === true; +} diff --git a/apps/desktop/src/main/lib/browser-mcp-bridge/server.ts b/apps/desktop/src/main/lib/browser-mcp-bridge/server.ts new file mode 100644 index 00000000000..d93b84a96f3 --- /dev/null +++ b/apps/desktop/src/main/lib/browser-mcp-bridge/server.ts @@ -0,0 +1,305 @@ +import { randomBytes } from "node:crypto"; +import { chmodSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { + createServer, + type IncomingMessage, + type Server, + type ServerResponse, +} from "node:http"; +import { dirname, join } from "node:path"; +import { app } from "electron"; +import { SUPERSET_HOME_DIR } from "../app-environment"; +import { browserManager } from "../browser/browser-manager"; +import { + ensureGlobalBrowserUseConfig, + handleCdpGatewayRequest, + handleCdpGatewayUpgrade, + isCdpGatewayPath, + isCdpGatewayUpgradePath, +} from "./cdp-gateway"; +import { resolveCdpPort } from "./cdp-port"; +import { getBoundPaneForSession, resolvePpidToSession } from "./pane-resolver"; + +/** + * HTTP bridge between the `packages/superset-browser-mcp` MCP server and + * this Electron app. 
The MCP discovers the app via a runtime info file at + * `${SUPERSET_HOME_DIR}/browser-mcp.json` (workspace-scoped) — this lets + * multiple Superset instances with different `SUPERSET_WORKSPACE_NAME` + * values coexist without overwriting each other's port/secret. + * + * Scope of this bridge is intentionally small: the MCP only needs to + * resolve its PPID → Superset LLM session → bound paneId → metadata + * about that pane. Actual browser automation (click / navigate / DOM + * inspection / screenshot) is delegated to external browser MCPs via + * the per-pane filtered CDP endpoint (see ./plan.md in the repo root). + * This file should stay small; if you are about to add tool-like + * endpoints here, you're fighting the plan. + */ + +const RUNTIME_INFO_PATH = join(SUPERSET_HOME_DIR, "browser-mcp.json"); + +/** + * Preferred loopback port for the bridge. Chosen in the IANA + * dynamic-port range where browser dev tools are unlikely to collide + * (9000-series is taken by Chrome remote debugging, 3000/5173 by dev + * servers, 8080 by everything, etc.). Persisted to browser-mcp.json so + * the same port is reused on restart — which lets the CDP URL that an + * external MCP was registered with stay valid across Superset launches. 
+ */
+const PREFERRED_BRIDGE_PORT = 47834;
+
+async function tryListen(server: Server, port: number): Promise<number | null> {
+  return new Promise((resolve) => {
+    const onError = (err: NodeJS.ErrnoException): void => {
+      server.off("error", onError);
+      // Any listen failure (EADDRINUSE or otherwise): skip this candidate.
+      resolve(null);
+    };
+    server.once("error", onError);
+    server.listen(port, "127.0.0.1", () => {
+      server.off("error", onError);
+      const address = server.address();
+      if (!address || typeof address === "string") {
+        resolve(null);
+        return;
+      }
+      resolve(address.port);
+    });
+  });
+}
+
+function readPersistedPort(): number | null {
+  try {
+    const raw = readFileSync(RUNTIME_INFO_PATH, "utf8");
+    const parsed = JSON.parse(raw) as { port?: number };
+    if (
+      typeof parsed.port === "number" &&
+      Number.isInteger(parsed.port) &&
+      parsed.port > 0 &&
+      parsed.port < 65_536
+    ) {
+      return parsed.port;
+    }
+  } catch {
+    /* no prior state */
+  }
+  return null;
+}
+
+async function listenPreferringStablePort(server: Server): Promise<number> {
+  // The gateway URL is now always http://127.0.0.1:47834, so prefer
+  // that first even if an older build persisted a different port in
+  // browser-mcp.json (e.g. 49939 from the per-session port era). Only
+  // fall back to the persisted value if 47834 is taken, then to a
+  // kernel-assigned port.
+ const previous = readPersistedPort(); + const candidates = [PREFERRED_BRIDGE_PORT, previous].filter( + (p, i, arr): p is number => typeof p === "number" && arr.indexOf(p) === i, + ); + for (const candidate of candidates) { + const bound = await tryListen(server, candidate); + if (bound) return bound; + } + const bound = await tryListen(server, 0); + if (bound) return bound; + throw new Error("browser-mcp-bridge: could not bind any loopback port"); +} + +async function resolvePaneFromRequest( + req: IncomingMessage, +): Promise< + { paneId: string; sessionId: string } | { error: string; status: number } +> { + const ppidHeader = req.headers["x-superset-mcp-ppid"]; + const ppid = + typeof ppidHeader === "string" ? Number.parseInt(ppidHeader, 10) : NaN; + if (!Number.isFinite(ppid) || ppid <= 0) { + return { error: "missing x-superset-mcp-ppid header", status: 400 }; + } + const resolved = await resolvePpidToSession(ppid); + if (!resolved) { + return { + error: + "Could not map this MCP to a Superset LLM session. Make sure Claude / Codex is running inside a Superset terminal pane.", + status: 404, + }; + } + const paneId = getBoundPaneForSession(resolved.sessionId); + if (!paneId) { + return { + error: `No browser pane is bound to session ${resolved.sessionId}. 
Open the Connect dialog in the Superset UI to pick one.`, + status: 409, + }; + } + return { paneId, sessionId: resolved.sessionId }; +} + +function send(res: ServerResponse, status: number, body: unknown): void { + res.statusCode = status; + res.setHeader("content-type", "application/json"); + res.end(JSON.stringify(body)); +} + +interface BridgeHandle { + port: number; + secret: string; + stop: () => Promise; +} + +let current: BridgeHandle | null = null; + +export function getBrowserMcpBridge(): BridgeHandle | null { + return current; +} + +export async function startBrowserMcpBridge(): Promise { + if (current) return current; + const secret = randomBytes(24).toString("hex"); + + const server: Server = createServer(async (req, res) => { + try { + // Require loopback for every route. + const remote = req.socket.remoteAddress ?? ""; + if ( + remote !== "127.0.0.1" && + remote !== "::1" && + remote !== "::ffff:127.0.0.1" + ) { + return send(res, 403, { error: "loopback only" }); + } + + const url = new URL(req.url ?? "/", "http://localhost"); + + // CDP gateway routes are unauthenticated (external CDP MCPs + // compose URLs that drop the path, so no secret can survive). + // Their capability is the peer-PID tree-descendant check. + if (isCdpGatewayPath(url.pathname)) { + return handleCdpGatewayRequest(req, res); + } + + const auth = req.headers.authorization ?? ""; + if (auth !== `Bearer ${secret}`) { + return send(res, 401, { error: "bad token" }); + } + + if (req.method === "POST" && url.pathname === "/mcp/register") { + return send(res, 200, { ok: true }); + } + + if (req.method === "GET" && url.pathname === "/mcp/cdp-endpoint") { + const resolved = await resolvePaneFromRequest(req); + if ("error" in resolved) + return send(res, resolved.status, { error: resolved.error }); + const targetId = browserManager.getCdpTargetId(resolved.paneId); + if (!targetId) { + return send(res, 503, { + error: + "CDP targetId for this pane has not been captured yet. 
Give the pane a moment to finish loading and retry.", + }); + } + const cdpPort = await resolveCdpPort(); + if (!cdpPort) { + return send(res, 503, { + error: + "Chromium CDP port is not available. This build did not start with --remote-debugging-port.", + }); + } + const wc = browserManager.getWebContents(resolved.paneId); + // Since M1 the CDP data plane lives on this same bridge + // port (47834); the gateway routes each incoming + // connection by peer-PID. One stable URL works for every + // session and survives restarts / rebindings. + const gatewayPort = await import("./server").then( + (m) => m.getBrowserMcpBridge()?.port ?? port, + ); + return send(res, 200, { + paneId: resolved.paneId, + sessionId: resolved.sessionId, + targetId, + cdpPort, + httpBase: `http://127.0.0.1:${gatewayPort}`, + webSocketDebuggerUrl: `ws://127.0.0.1:${gatewayPort}/devtools/page/${targetId}`, + url: wc?.getURL() ?? null, + title: wc?.getTitle() ?? null, + filtered: true, + }); + } + + if (req.method === "GET" && url.pathname === "/mcp/binding") { + const resolved = await resolvePaneFromRequest(req); + if ("error" in resolved) { + return send(res, 200, { + bound: false, + paneId: null, + sessionId: null, + url: null, + title: null, + reason: resolved.error, + }); + } + const wc = browserManager.getWebContents(resolved.paneId); + return send(res, 200, { + bound: true, + paneId: resolved.paneId, + sessionId: resolved.sessionId, + url: wc?.getURL() ?? null, + title: wc?.getTitle() ?? null, + }); + } + + return send(res, 404, { error: "not found" }); + } catch (error) { + console.error("[browser-mcp-bridge]", error); + return send(res, 500, { + error: error instanceof Error ? error.message : String(error), + }); + } + }); + + // CDP gateway WS upgrades land on /devtools/browser/ and + // /devtools/page/. Route them to the gateway before the default + // socket.destroy() path kicks in. + server.on("upgrade", (req, socket, head) => { + const pathname = new URL(req.url ?? 
"/", "http://localhost").pathname; + if (isCdpGatewayUpgradePath(pathname)) { + void handleCdpGatewayUpgrade(req, socket, head); + return; + } + socket.destroy(); + }); + + const port = await listenPreferringStablePort(server); + + // One-shot: write the global browser-use config pointing at this + // gateway. Same file for every session; session routing happens + // per connection via peer-PID. + ensureGlobalBrowserUseConfig(port); + + mkdirSync(dirname(RUNTIME_INFO_PATH), { recursive: true }); + writeFileSync(RUNTIME_INFO_PATH, JSON.stringify({ port, secret }, null, 2), { + mode: 0o600, + }); + // writeFileSync's mode only applies to new files — an existing + // runtime file from a previous run could still be world-readable. + // Force 0600 on every start so the shared secret stays locked down. + try { + chmodSync(RUNTIME_INFO_PATH, 0o600); + } catch { + /* best-effort */ + } + + app.on("will-quit", () => { + server.close(); + }); + + current = { + port, + secret, + stop: () => + new Promise((resolve) => { + server.close(() => resolve()); + }), + }; + console.log(`[browser-mcp-bridge] listening on 127.0.0.1:${port}`); + return current; +} diff --git a/apps/desktop/src/main/lib/browser/browser-identity-manager.ts b/apps/desktop/src/main/lib/browser/browser-identity-manager.ts new file mode 100644 index 00000000000..9c2d4085387 --- /dev/null +++ b/apps/desktop/src/main/lib/browser/browser-identity-manager.ts @@ -0,0 +1,76 @@ +import { session } from "electron"; + +const APP_BROWSER_PARTITION = "persist:superset"; + +function getChromeVersion(): string { + return process.versions.chrome ?? "140.0.0.0"; +} + +function getChromeMajorVersion(): string { + return getChromeVersion().split(".")[0] ?? 
"140"; +} + +function getChromeLikeUserAgent(userAgent: string): string { + return userAgent.replace(/\sElectron\/[^\s]+/g, "").trim(); +} + +function getClientHintPlatform(): string { + switch (process.platform) { + case "darwin": + return "macOS"; + case "win32": + return "Windows"; + default: + return "Linux"; + } +} + +function setHeader( + headers: Record, + name: string, + value: string, +): void { + const existingKey = Object.keys(headers).find( + (headerName) => headerName.toLowerCase() === name.toLowerCase(), + ); + if (existingKey) { + headers[existingKey] = value; + return; + } + + headers[name] = value; +} + +let initialized = false; + +export function initializeBrowserIdentityManager(): void { + if (initialized) { + return; + } + + initialized = true; + + const browserSession = session.fromPartition(APP_BROWSER_PARTITION); + const chromeVersion = getChromeVersion(); + const chromeMajorVersion = getChromeMajorVersion(); + const clientHintPlatform = getClientHintPlatform(); + const secChUa = `"Google Chrome";v="${chromeMajorVersion}", "Chromium";v="${chromeMajorVersion}", "Not_A Brand";v="24"`; + const secChUaFullVersionList = `"Google Chrome";v="${chromeVersion}", "Chromium";v="${chromeVersion}", "Not_A Brand";v="24.0.0.0"`; + + browserSession.webRequest.onBeforeSendHeaders((details, callback) => { + const headers = { ...details.requestHeaders }; + const originalUserAgent = + headers["User-Agent"] ?? + headers["user-agent"] ?? 
+ `Mozilla/5.0 Chrome/${chromeVersion}`; + + setHeader(headers, "User-Agent", getChromeLikeUserAgent(originalUserAgent)); + setHeader(headers, "Sec-CH-UA", secChUa); + setHeader(headers, "Sec-CH-UA-Mobile", "?0"); + setHeader(headers, "Sec-CH-UA-Platform", `"${clientHintPlatform}"`); + setHeader(headers, "Sec-CH-UA-Full-Version", `"${chromeVersion}"`); + setHeader(headers, "Sec-CH-UA-Full-Version-List", secChUaFullVersionList); + + callback({ requestHeaders: headers }); + }); +} diff --git a/apps/desktop/src/main/lib/browser/browser-manager.ts b/apps/desktop/src/main/lib/browser/browser-manager.ts index cbb884815b3..7242e253db7 100644 --- a/apps/desktop/src/main/lib/browser/browser-manager.ts +++ b/apps/desktop/src/main/lib/browser/browser-manager.ts @@ -1,5 +1,14 @@ import { EventEmitter } from "node:events"; -import { clipboard, Menu, webContents } from "electron"; +import { basename, join } from "node:path"; +import { + app, + type BrowserWindow, + clipboard, + dialog, + Menu, + nativeTheme, + webContents, +} from "electron"; import { safeOpenExternal } from "main/lib/safe-url"; interface ConsoleEntry { @@ -10,6 +19,56 @@ interface ConsoleEntry { const MAX_CONSOLE_ENTRIES = 500; +function buildElementPathScript(x: number, y: number): string { + return `(function() { + var el = document.elementFromPoint(${x}, ${y}); + if (!el) return null; + function getCssSelector(element) { + var parts = []; + var current = element; + while (current && current.nodeType === 1 && current !== document.documentElement) { + var sel = current.tagName.toLowerCase(); + if (current.id) { + parts.unshift('#' + CSS.escape(current.id)); + return parts.join(' > '); + } + var classes = Array.prototype.slice.call(current.classList, 0, 3).map(function(c) { return CSS.escape(c); }); + if (classes.length > 0) sel += '.' 
+ classes.join('.'); + var parent = current.parentElement; + if (parent) { + var sameTag = Array.prototype.filter.call(parent.children, function(s) { return s.tagName === current.tagName; }); + if (sameTag.length > 1) sel += ':nth-of-type(' + (Array.prototype.indexOf.call(sameTag, current) + 1) + ')'; + } + parts.unshift(sel); + current = current.parentElement; + if (parts.length >= 5) break; + } + return parts.join(' > '); + } + function getXPath(element) { + if (element.id) return '//*[@id="' + element.id + '"]'; + var parts = []; + var current = element; + var truncated = false; + while (current && current.nodeType === 1) { + var tag = current.tagName.toLowerCase(); + var parent = current.parentElement; + if (!parent) { parts.unshift(tag); break; } + var sameTag = Array.prototype.filter.call(parent.children, function(s) { return s.tagName === current.tagName; }); + if (sameTag.length > 1) { + parts.unshift(tag + '[' + (Array.prototype.indexOf.call(sameTag, current) + 1) + ']'); + } else { + parts.unshift(tag); + } + current = parent; + if (parts.length >= 8) { truncated = true; break; } + } + return (truncated ? '//' : '/') + parts.join('/'); + } + return { cssSelector: getCssSelector(el), xpath: getXPath(el) }; + })()`; +} + function sanitizeUrl(url: string): string { if (/^https?:\/\//i.test(url) || url.startsWith("about:")) { return url; @@ -23,17 +82,53 @@ function sanitizeUrl(url: string): string { return `https://www.google.com/search?q=${encodeURIComponent(url)}`; } +function getChromeLikeUserAgent(userAgent: string): string { + return userAgent.replace(/\sElectron\/[^\s]+/g, "").trim(); +} + class BrowserManager extends EventEmitter { private paneWebContentsIds = new Map(); + /** + * Chromium CDP targetId for each pane's webview, captured once per + * register(). The browser-mcp bridge uses this to hand out a + * per-pane `ws://…/devtools/page/` URL without probing + * `/json/list` on every tool call. 
Stable for the lifetime of the + * underlying webContents. + */ + private paneTargetIds = new Map(); + /** + * Secondary per-pane CDP targetIds owned by "tabs" beyond the + * pane's primary webview. M2 multi-tab wiring populates these; M1 + * leaves the map empty and the filter runs in single-target mode. + */ + private paneTabTargetIds = new Map>(); + private paneTabTargetIdByKey = new Map(); + private paneTabWebContents = new Map(); + private paneIdMarkerListeners = new Map void>(); private consoleLogs = new Map(); private consoleListeners = new Map void>(); private contextMenuListeners = new Map void>(); + private fullscreenListeners = new Map void>(); + private popupListeners = new Map void>(); + private findListeners = new Map void>(); + /** Track which pane is currently in HTML fullscreen */ + private fullscreenPaneId: string | null = null; + + getFullscreenPaneId(): string | null { + return this.fullscreenPaneId; + } register(paneId: string, webContentsId: number): void { // Clean up previous listeners if re-registering with a new webContentsId const prevId = this.paneWebContentsIds.get(paneId); if (prevId != null && prevId !== webContentsId) { - for (const map of [this.consoleListeners, this.contextMenuListeners]) { + for (const map of [ + this.consoleListeners, + this.contextMenuListeners, + this.fullscreenListeners, + this.popupListeners, + this.findListeners, + ]) { const cleanup = map.get(paneId); if (cleanup) { cleanup(); @@ -42,32 +137,100 @@ class BrowserManager extends EventEmitter { } } this.paneWebContentsIds.set(paneId, webContentsId); + // Invalidate any stale targetId captured from a previous + // webContents so /mcp/cdp-endpoint never returns a URL pointing + // at a dead target while the async recapture is in flight. + this.paneTargetIds.delete(paneId); const wc = webContents.fromId(webContentsId); if (wc) { - // Keep throttling enabled so parked/offscreen persistent webviews don't - // run at full speed in the background. 
- wc.setBackgroundThrottling(true); - wc.setWindowOpenHandler(({ url }) => { - if (url && url !== "about:blank") { - this.emit(`new-window:${paneId}`, url); + // External CDP MCPs (chrome-devtools-mcp, browser-use) may + // drive this primary webview while its BrowserPane is + // off-screen, the parent BrowserWindow is minimised, or + // the pane is obscured. Chromium's LifecycleWatcher and + // many sites gate work on document.hidden / + // requestAnimationFrame / IntersectionObserver, all of + // which freeze under background throttling. Keep throttling + // off so automation stays responsive; the perf cost of an + // idle pane is negligible. + wc.setBackgroundThrottling(false); + wc.setWindowOpenHandler(({ url, disposition }) => { + if (!url || url === "about:blank") { + return { action: "deny" as const }; + } + + // window.open() calls (OAuth popups, auth flows, etc.) — allow as a + // real child BrowserWindow so window.opener / postMessage work. + if (disposition === "new-window") { + return { + action: "allow" as const, + overrideBrowserWindowOptions: { + width: 500, + height: 700, + autoHideMenuBar: true, + backgroundColor: nativeTheme.shouldUseDarkColors + ? "#252525" + : "#ffffff", + webPreferences: { + partition: "persist:superset", + }, + }, + }; } + + // Regular target="_blank" / window.open() — open as a + // new secondary tab in the same pane so it stays within + // the MCP's bound scope (the pane's CDP session sees it + // via paneTabTargetIds and the user can drive it with + // the same binding). This matches Chrome's default + // target="_blank" UX (new tab, not new window). The old + // split-pane / workspace-tab behaviours are still + // reachable via the "Open in Split" context menu. 
+ this.emit(`create-tab-requested:${paneId}`, { url }); return { action: "deny" as const }; }); + this.setupPopupWindowHandler(paneId, wc); + this.setupFullscreenHandler(paneId, wc); this.setupConsoleCapture(paneId, wc); this.setupContextMenu(paneId, wc); + this.setupFindInPage(paneId, wc); + this.setupPaneIdMarker(paneId, wc); + this.setupJsDialogHandler(paneId, wc); + this.setupDownloadHandler(paneId, wc); + void this.captureCdpTargetId(paneId, wc); } } unregister(paneId: string): void { - for (const map of [this.consoleListeners, this.contextMenuListeners]) { + for (const map of [ + this.consoleListeners, + this.contextMenuListeners, + this.fullscreenListeners, + this.popupListeners, + this.findListeners, + this.paneIdMarkerListeners, + ]) { const cleanup = map.get(paneId); if (cleanup) { cleanup(); map.delete(paneId); } } + if (this.fullscreenPaneId === paneId) { + this.fullscreenPaneId = null; + } this.paneWebContentsIds.delete(paneId); + this.paneTargetIds.delete(paneId); + this.paneTabTargetIds.delete(paneId); + // Sweep per-tab maps keyed by paneId::tabId. + const tabPrefix = `${paneId}::`; + for (const key of [...this.paneTabTargetIdByKey.keys()]) { + if (key.startsWith(tabPrefix)) this.paneTabTargetIdByKey.delete(key); + } + for (const key of [...this.paneTabWebContents.keys()]) { + if (key.startsWith(tabPrefix)) this.paneTabWebContents.delete(key); + } this.consoleLogs.delete(paneId); + this.emit(`pane-target-set-changed:${paneId}`, { action: "clear" }); } unregisterAll(): void { @@ -84,6 +247,358 @@ class BrowserManager extends EventEmitter { return wc; } + /** + * Chromium CDP targetId for the pane's webview, or null if we have + * not finished capturing it yet. The browser-mcp bridge uses this to + * hand external automation MCPs a per-pane ws://.../devtools/page/ + * URL. + */ + getCdpTargetId(paneId: string): string | null { + return this.paneTargetIds.get(paneId) ?? 
null; + } + + /** + * Return the full set of Chromium CDP targetIds that belong to a + * pane. For single-tab panes this is the singleton primary + * targetId; when multi-tab support is wired in M2 the registry + * will populate additional tab ids through addPaneTabTarget / + * removePaneTabTarget (see below). Returning undefined lets the + * gateway fall back to the primary-only path. + */ + getPaneTargetIds(paneId: string): Set | undefined { + const extras = this.paneTabTargetIds.get(paneId); + const primary = this.paneTargetIds.get(paneId); + if (!primary && !extras) return undefined; + const set = new Set(); + if (primary) set.add(primary); + if (extras) for (const id of extras) set.add(id); + return set; + } + + addPaneTabTarget(paneId: string, targetId: string): void { + let set = this.paneTabTargetIds.get(paneId); + if (!set) { + set = new Set(); + this.paneTabTargetIds.set(paneId, set); + } + set.add(targetId); + console.log( + "[browser-manager] addPaneTabTarget", + paneId, + targetId, + "now", + Array.from(set), + ); + // Notify any in-flight Target.createTarget waiters in the gateway. + this.emit(`tab-target-added:${paneId}`, targetId); + this.emit(`pane-target-set-changed:${paneId}`, { action: "add", targetId }); + } + + removePaneTabTarget(paneId: string, targetId: string): void { + this.paneTabTargetIds.get(paneId)?.delete(targetId); + console.log("[browser-manager] removePaneTabTarget", paneId, targetId); + this.emit(`pane-target-set-changed:${paneId}`, { + action: "remove", + targetId, + }); + } + + listPanesWithCdpTargets(): Array<{ paneId: string; targetId: string }> { + return Array.from(this.paneTargetIds.entries()).map( + ([paneId, targetId]) => ({ paneId, targetId }), + ); + } + + /** + * Inject `window.__supersetPaneId = ''` into every top frame + * of this pane, including after navigation. External CDP clients + * (chrome-devtools-mcp etc.) that enumerate /json/list use this as + * the ground-truth pane identifier via Runtime.evaluate. 
+ */ + private setupPaneIdMarker(paneId: string, wc: Electron.WebContents): void { + const literal = JSON.stringify(paneId); + const inject = (): void => { + if (wc.isDestroyed()) return; + // executeJavaScript returns a promise we don't need to await. + void wc + .executeJavaScript(`window.__supersetPaneId = ${literal};`, false) + .catch(() => { + /* Pages like about:blank or in the middle of a redirect + can reject — retry on the next did-navigate. */ + }); + }; + wc.on("did-navigate", inject); + wc.on("did-navigate-in-page", inject); + wc.on("did-finish-load", inject); + inject(); + this.paneIdMarkerListeners.set(paneId, () => { + wc.off("did-navigate", inject); + wc.off("did-navigate-in-page", inject); + wc.off("did-finish-load", inject); + }); + } + + /** + * Briefly attach the Electron CDP debugger to this pane so we can + * read `Target.getTargetInfo` and remember the Chromium-assigned + * targetId. Detach right after so we do not conflict with external + * CDP clients the user wires up later. + */ + private async captureCdpTargetId( + paneId: string, + wc: Electron.WebContents, + ): Promise { + if (wc.isDestroyed()) return; + const expectedWebContentsId = wc.id; + let attachedHere = false; + try { + if (!wc.debugger.isAttached()) { + wc.debugger.attach("1.3"); + attachedHere = true; + } + const info = (await wc.debugger.sendCommand("Target.getTargetInfo")) as { + targetInfo?: { targetId?: string }; + }; + const targetId = info?.targetInfo?.targetId; + // Late-resolution guard: if the pane was unregistered or + // re-registered with a different webContents while we were + // awaiting, do not overwrite the current cache with stale data. 
+ const currentId = this.paneWebContentsIds.get(paneId); + if ( + typeof targetId === "string" && + targetId.length > 0 && + currentId === expectedWebContentsId + ) { + const previous = this.paneTargetIds.get(paneId); + this.paneTargetIds.set(paneId, targetId); + console.log( + "[browser-manager] captured primary targetId pane", + paneId, + "wc", + expectedWebContentsId, + "targetId", + targetId, + "previous", + previous, + ); + if (previous !== targetId) { + this.emit(`pane-target-set-changed:${paneId}`, { + action: "primary", + targetId, + }); + } + } else { + console.log( + "[browser-manager] discarded captured targetId pane", + paneId, + "expectedWc", + expectedWebContentsId, + "currentWc", + currentId, + "targetId", + targetId, + ); + } + } catch (error) { + console.warn( + `[browser-manager] failed to capture CDP targetId for pane ${paneId}:`, + error, + ); + } finally { + if (attachedHere) { + try { + wc.debugger.detach(); + } catch { + /* already detached */ + } + } + } + } + + /** + * Register a secondary tab webContents for a pane. Captures its + * CDP targetId and adds it to the pane's tab target set so the + * gateway exposes it via Target.getTargets / filter. + */ + async registerTab( + paneId: string, + tabId: string, + webContentsId: number, + ): Promise { + const wc = webContents.fromId(webContentsId); + if (!wc || wc.isDestroyed()) return; + // Tabs are routinely off-screen while another tab is active. + // External CDP MCPs (browser-use, chrome-devtools-mcp) need + // the inactive tab's webContents to keep timers / network / + // JS running so navigation doesn't stall waiting for visibility. 
+ try { + wc.setBackgroundThrottling(false); + } catch { + /* best-effort */ + } + this.paneTabWebContents.set(this.tabKey(paneId, tabId), webContentsId); + let attached = false; + try { + if (!wc.debugger.isAttached()) { + wc.debugger.attach("1.3"); + attached = true; + } + const info = (await wc.debugger.sendCommand("Target.getTargetInfo")) as { + targetInfo?: { targetId?: string }; + }; + const targetId = info?.targetInfo?.targetId; + if (typeof targetId === "string" && targetId.length > 0) { + this.paneTabTargetIdByKey.set(this.tabKey(paneId, tabId), targetId); + this.addPaneTabTarget(paneId, targetId); + } + } catch (error) { + console.warn( + `[browser-manager] failed to capture tab CDP targetId for pane ${paneId} tab ${tabId}:`, + error, + ); + } finally { + if (attached) { + try { + wc.debugger.detach(); + } catch { + /* ignore */ + } + } + } + } + + unregisterTab(paneId: string, tabId: string): void { + const key = this.tabKey(paneId, tabId); + const targetId = this.paneTabTargetIdByKey.get(key); + if (targetId) this.removePaneTabTarget(paneId, targetId); + this.paneTabTargetIdByKey.delete(key); + this.paneTabWebContents.delete(key); + } + + /** + * Correlate a renderer-side tab spawn (result of a + * create-tab-requested event) with its Chromium CDP targetId for + * the gateway's Target.createTarget waiter. Requires registerTab + * to have already captured the targetId; if it hasn't yet, + * listen for the next addPaneTabTarget on the pane and use that. + */ + acknowledgeTabCreated( + paneId: string, + requestId: string, + tabId: string, + ): void { + const key = this.tabKey(paneId, tabId); + const existing = this.paneTabTargetIdByKey.get(key); + console.log( + "[tab-diag] acknowledgeTabCreated pane=", + paneId, + "tab=", + tabId, + "req=", + requestId, + "existingTarget=", + existing ?? 
"(waiting)", + ); + if (existing) { + this.emit(`tab-target-added-for:${paneId}`, { + requestId, + targetId: existing, + }); + return; + } + const handler = (addedTargetId: string) => { + const current = this.paneTabTargetIdByKey.get(key); + if (current !== addedTargetId) return; + this.off(`tab-target-added:${paneId}`, handler); + this.emit(`tab-target-added-for:${paneId}`, { + requestId, + targetId: addedTargetId, + }); + }; + this.on(`tab-target-added:${paneId}`, handler); + // Safety: drop the listener after 10s so it can't leak. + setTimeout(() => this.off(`tab-target-added:${paneId}`, handler), 10_000); + } + + private tabKey(paneId: string, tabId: string): string { + return `${paneId}::${tabId}`; + } + + /** + * Inverse of registerTab: given a CDP targetId and a paneId, + * return the tabId the pane registered against that target. + * Returns null for the pane's primary (host-level webContents) + * target — callers typically treat that as "already the active + * tab" and no-op. + */ + /** + * Return the webContents id registered for a given (paneId, tabId) + * tuple or the pane's primary if tabId is null. Used by the CDP + * filter to force-focus the right webContents before dispatching + * session-scoped Input.* events — Electron otherwise sometimes + * routes synthetic key events to whichever widget has OS focus + * (including the Superset terminal pane). + */ + getWebContentsIdForTab(paneId: string, tabId: string | null): number | null { + if (tabId === null) { + return this.paneWebContentsIds.get(paneId) ?? null; + } + return this.paneTabWebContents.get(this.tabKey(paneId, tabId)) ?? null; + } + + getTabIdForTarget(paneId: string, targetId: string): string | null { + const prefix = `${paneId}::`; + for (const [key, tid] of this.paneTabTargetIdByKey) { + if (tid === targetId && key.startsWith(prefix)) { + return key.slice(prefix.length); + } + } + return null; + } + + /** + * Find which pane (if any) a given CDP targetId belongs to. 
Used + * by the CDP filter to route Target.activateTarget / + * Page.bringToFront back to the renderer so the tab bar UI + * follows MCP-driven tab switches (matches Chrome's behaviour). + */ + getPaneIdForTarget(targetId: string): string | null { + for (const [paneId, primary] of this.paneTargetIds) { + if (primary === targetId) return paneId; + } + for (const [key, tid] of this.paneTabTargetIdByKey) { + if (tid === targetId) { + const sep = key.indexOf("::"); + if (sep > 0) return key.slice(0, sep); + } + } + return null; + } + + /** + * Emit an activate-tab event for the given pane so subscribers + * (the BrowserPane renderer) can flip their tab-bar UI. Called + * by the CDP filter when MCP sends Target.activateTarget / + * Page.bringToFront for a tab we know about. + * + * When tabId is null the pane's primary is meant (i.e. the + * non-secondary tab driven by usePersistentWebview); the + * renderer uses that to reveal the primary and hide secondaries. + */ + requestTabActivation(paneId: string, tabId: string | null): void { + this.emit(`activate-tab-requested:${paneId}`, { tabId }); + } + + getPaneIdForWebContents(webContentsId: number): string | null { + for (const [paneId, registeredWebContentsId] of this.paneWebContentsIds) { + if (registeredWebContentsId === webContentsId) { + return paneId; + } + } + + return null; + } + navigate(paneId: string, url: string): void { const wc = this.getWebContents(paneId); if (!wc) throw new Error(`No webContents for pane ${paneId}`); @@ -94,7 +609,11 @@ class BrowserManager extends EventEmitter { const wc = this.getWebContents(paneId); if (!wc) throw new Error(`No webContents for pane ${paneId}`); const image = await wc.capturePage(); - clipboard.writeImage(image); + try { + clipboard.writeImage(image); + } catch (error) { + console.error("[browser-manager] clipboard.writeImage failed:", error); + } return image.toPNG().toString("base64"); } @@ -114,6 +633,335 @@ class BrowserManager extends EventEmitter { 
wc.openDevTools({ mode: "detach" }); } + /** + * Surface the native Electron print dialog for the pane. Called + * from the renderer when the user hits Cmd+P, and also + * indirectly via window.print() (Chromium routes that through + * the webContents print event which Electron handles). + */ + print(paneId: string): void { + const wc = this.getWebContents(paneId); + if (!wc) return; + try { + wc.print({ silent: false, printBackground: true }); + } catch (error) { + console.warn("[browser-manager] print failed:", error); + } + } + + findInPage( + paneId: string, + text: string, + options?: { forward?: boolean; findNext?: boolean; matchCase?: boolean }, + ): number | null { + const wc = this.getWebContents(paneId); + if (!wc || !text) return null; + return wc.findInPage(text, options); + } + + stopFindInPage( + paneId: string, + action: "clearSelection" | "keepSelection" | "activateSelection", + ): void { + const wc = this.getWebContents(paneId); + if (!wc) return; + wc.stopFindInPage(action); + } + + /** + * Listen for native `found-in-page` results and for Cmd/Ctrl+F keypresses + * happening inside the webview. The renderer cannot see keydown events + * dispatched to the guest page, so we intercept them here via + * `before-input-event` and emit a request to open the find overlay. 
+ */ + private setupFindInPage(paneId: string, wc: Electron.WebContents): void { + const foundHandler = (_event: Electron.Event, result: Electron.Result) => { + this.emit(`found-in-page:${paneId}`, { + requestId: result.requestId, + activeMatchOrdinal: result.activeMatchOrdinal, + matches: result.matches, + finalUpdate: result.finalUpdate, + }); + }; + + const inputHandler = (event: Electron.Event, input: Electron.Input) => { + if (input.type !== "keyDown") return; + const isFindKey = + (input.meta || input.control) && + input.key.toLowerCase() === "f" && + !input.alt && + !input.shift; + if (isFindKey) { + event.preventDefault(); + this.emit(`find-requested:${paneId}`); + return; + } + if (input.key === "Escape") { + this.emit(`find-escape:${paneId}`); + } + }; + + wc.on("found-in-page", foundHandler); + wc.on("before-input-event", inputHandler); + + this.findListeners.set(paneId, () => { + try { + wc.off("found-in-page", foundHandler); + wc.off("before-input-event", inputHandler); + } catch { + // webContents may be destroyed + } + }); + } + + /** + * Configure child windows created by window.open() (OAuth popups etc.). + * The child BrowserWindow preserves window.opener so postMessage-based + * auth flows work correctly. + */ + private setupPopupWindowHandler( + paneId: string, + wc: Electron.WebContents, + ): void { + const handler = (childWindow: BrowserWindow, { url }: { url: string }) => { + const childWc = childWindow.webContents; + + // Strip Electron token from child window's User-Agent + const originalUA = childWc.getUserAgent(); + childWc.setUserAgent(getChromeLikeUserAgent(originalUA)); + + // If the popup navigates to about:blank or a javascript: URI, it likely + // means the auth flow finished and the opener consumed the result. + childWc.on("will-navigate", (_event, navUrl) => { + if (navUrl === "about:blank") { + childWindow.close(); + } + }); + + // Some OAuth flows close the popup themselves via window.close() in JS. 
+ // That is handled natively by Electron. We also handle the case where the + // user manually closes the popup — nothing special is needed. + + console.log(`[browser-manager] Popup opened for pane ${paneId}: ${url}`); + }; + + wc.on("did-create-window", handler); + this.popupListeners.set(paneId, () => { + try { + wc.off("did-create-window", handler); + } catch { + // webContents may be destroyed + } + }); + } + + /** + * Track HTML5 fullscreen enter/leave on webview content (e.g. YouTube + * video fullscreen). The BrowserWindow also enters fullscreen natively + * (like Chrome). We emit events so the renderer can adjust its UI + * (hide sidebar/tabs when entering, restore when leaving). + */ + private setupFullscreenHandler( + paneId: string, + wc: Electron.WebContents, + ): void { + const handleEnter = () => { + this.fullscreenPaneId = paneId; + this.emit("fullscreen-change", { paneId, isFullscreen: true }); + }; + + const handleLeave = () => { + if (this.fullscreenPaneId === paneId) { + this.fullscreenPaneId = null; + } + this.emit("fullscreen-change", { paneId, isFullscreen: false }); + }; + + wc.on("enter-html-full-screen", handleEnter); + wc.on("leave-html-full-screen", handleLeave); + + this.fullscreenListeners.set(paneId, () => { + try { + wc.off("enter-html-full-screen", handleEnter); + wc.off("leave-html-full-screen", handleLeave); + } catch { + // webContents may be destroyed + } + }); + } + + /** + * Surface JavaScript dialogs (alert / confirm / prompt / + * beforeunload) as native Electron dialogs so the user can see + * and respond to them. Without this Electron auto-dismisses + * dialogs in webview-hosted content, which silently breaks sites + * that use confirm/prompt for destructive actions and makes + * beforeunload unloads invisible to the user. 
+ */ + private setupJsDialogHandler(paneId: string, wc: Electron.WebContents): void { + // beforeunload: Electron only emits will-prevent-unload when + // the page asked for the confirmation; preventDefault tells + // Chromium to block the unload. + const handleBeforeUnload = (event: Electron.Event) => { + const owner = this.getOwnerWindow(paneId); + const choice = owner + ? dialog.showMessageBoxSync(owner, { + type: "question", + buttons: ["Leave", "Stay"], + defaultId: 0, + cancelId: 1, + title: "Leave site?", + message: "Changes you made may not be saved.", + }) + : 1; + if (choice === 1) event.preventDefault(); + }; + wc.on("will-prevent-unload", handleBeforeUnload); + + // alert/confirm/prompt are surfaced through the internal + // `-run-dialog` event in recent Electron versions. The event + // signature is still undocumented, so we cast defensively. + type RunDialogArgs = [ + event: Electron.Event & { + sender?: unknown; + preventDefault: () => void; + }, + dialogType: "alert" | "confirm" | "prompt", + messageText: string, + defaultPromptText: string, + reply: (shouldContinue: boolean, userInput: string) => void, + ]; + const handleRunDialog = (...args: unknown[]) => { + const [event, dialogType, messageText, defaultPromptText, reply] = + args as RunDialogArgs; + event.preventDefault(); + const owner = this.getOwnerWindow(paneId); + if (!owner) { + reply(false, ""); + return; + } + if (dialogType === "alert") { + dialog + .showMessageBox(owner, { + type: "info", + buttons: ["OK"], + message: messageText || "", + }) + .then(() => reply(true, "")) + .catch(() => reply(false, "")); + } else if (dialogType === "confirm") { + dialog + .showMessageBox(owner, { + type: "question", + buttons: ["OK", "Cancel"], + defaultId: 0, + cancelId: 1, + message: messageText || "", + }) + .then(({ response }) => reply(response === 0, "")) + .catch(() => reply(false, "")); + } else { + // prompt: Electron doesn't have a first-class prompt + // dialog, so we approximate with 
showMessageBox + + // inputs aren't natively supported — best we can do is + // accept the default text on OK and empty on Cancel + // until a custom modal is wired up. + dialog + .showMessageBox(owner, { + type: "question", + buttons: ["OK", "Cancel"], + defaultId: 0, + cancelId: 1, + message: messageText || "", + detail: defaultPromptText + ? `Default: ${defaultPromptText}` + : undefined, + }) + .then(({ response }) => + reply(response === 0, response === 0 ? defaultPromptText : ""), + ) + .catch(() => reply(false, "")); + } + }; + (wc as unknown as NodeJS.EventEmitter).on("-run-dialog", handleRunDialog); + } + + private getOwnerWindow(paneId: string): BrowserWindow | null { + const wc = this.getWebContents(paneId); + if (!wc) return null; + const { BrowserWindow } = require("electron") as typeof import("electron"); + return ( + BrowserWindow.fromWebContents(wc) ?? BrowserWindow.getFocusedWindow() + ); + } + + /** + * Handle webContents download events. Save to ~/Downloads with + * the suggested filename (de-duplicated) and emit a toast-style + * event so the renderer can surface the completion to the user. + * + * All Superset browser panes share the persist:superset session, + * so the `will-download` listener is registered exactly once per + * session, not per pane. Registering per-pane would pile up + * duplicate handlers across pane open / re-register / tearoff + * lifecycles and fire setSavePath+emit N times per download. + * (Raised in CodeRabbit review on PR #371.) + */ + private setupDownloadHandler(paneId: string, wc: Electron.WebContents): void { + this.ensureDownloadHandlerForSession(wc.session); + // Track which pane last triggered a download on this session so + // the single handler can emit pane-scoped events. 
+ this.lastDownloadInitiator = paneId; + } + + private downloadHandlerInstalled = new WeakSet(); + private lastDownloadInitiator: string | null = null; + + private ensureDownloadHandlerForSession(session: Electron.Session): void { + if (this.downloadHandlerInstalled.has(session)) return; + this.downloadHandlerInstalled.add(session); + session.on("will-download", (_event, item, webContents) => { + const downloadsDir = app.getPath("downloads"); + const suggested = item.getFilename() || "download"; + let target = join(downloadsDir, suggested); + try { + const fs = require("node:fs") as typeof import("node:fs"); + if (fs.existsSync(target)) { + const ext = suggested.includes(".") + ? suggested.slice(suggested.lastIndexOf(".")) + : ""; + const stem = ext + ? suggested.slice(0, suggested.length - ext.length) + : suggested; + target = join(downloadsDir, `${stem}-${Date.now()}${ext}`); + } + } catch { + /* best effort */ + } + item.setSavePath(target); + // Attribute the event to whichever pane owns the + // initiating webContents if we can resolve it; fall back + // to the last pane that registered a webContents on this + // session. This replaces the old duplicate-handler scheme. 
+ const paneId = + (webContents && this.getPaneIdForWebContents(webContents.id)) || + this.lastDownloadInitiator; + if (!paneId) return; + this.emit(`download-started:${paneId}`, { + filename: basename(target), + targetPath: target, + url: item.getURL(), + }); + item.on("done", (_e, state) => { + this.emit(`download-finished:${paneId}`, { + filename: basename(target), + targetPath: target, + state, + }); + }); + }); + } + private setupContextMenu(paneId: string, wc: Electron.WebContents): void { const handler = ( _event: Electron.Event, @@ -141,7 +989,13 @@ class BrowserManager extends EventEmitter { }, { label: "Copy Link Address", - click: () => clipboard.writeText(linkURL), + click: () => { + try { + clipboard.writeText(linkURL); + } catch { + // clipboard unavailable + } + }, }, { type: "separator" }, ); @@ -205,13 +1059,62 @@ class BrowserManager extends EventEmitter { { label: "Copy Page URL", click: () => { - if (pageURL) clipboard.writeText(pageURL); + if (pageURL) { + try { + clipboard.writeText(pageURL); + } catch { + // clipboard unavailable + } + } }, enabled: !!pageURL && pageURL !== "about:blank", }, ); } + menuItems.push( + { type: "separator" }, + { + label: "Copy Element Selector", + submenu: [ + { + label: "CSS Selector", + click: async () => { + try { + const result = (await wc.executeJavaScript( + buildElementPathScript(params.x, params.y), + )) as { cssSelector: string; xpath: string } | null; + if (result?.cssSelector) { + clipboard.writeText(result.cssSelector); + } + } catch { + // page may not support elementFromPoint + } + }, + }, + { + label: "XPath", + click: async () => { + try { + const result = (await wc.executeJavaScript( + buildElementPathScript(params.x, params.y), + )) as { cssSelector: string; xpath: string } | null; + if (result?.xpath) { + clipboard.writeText(result.xpath); + } + } catch { + // page may not support elementFromPoint + } + }, + }, + ], + }, + { + label: "Inspect Element", + click: () => 
wc.inspectElement(params.x, params.y), + }, + ); + const menu = Menu.buildFromTemplate(menuItems); menu.popup(); }; @@ -261,6 +1164,60 @@ class BrowserManager extends EventEmitter { } }); } + + showContextMenuForWebContents( + wc: Electron.WebContents, + x: number, + y: number, + ): void { + const script = buildElementPathScript(x, y); + const menuItems: Electron.MenuItemConstructorOptions[] = [ + { + label: "Copy Element Selector", + submenu: [ + { + label: "CSS Selector", + click: async () => { + try { + const result = (await wc.executeJavaScript(script)) as { + cssSelector: string; + xpath: string; + } | null; + if (result?.cssSelector) { + clipboard.writeText(result.cssSelector); + } + } catch { + // page may not support elementFromPoint + } + }, + }, + { + label: "XPath", + click: async () => { + try { + const result = (await wc.executeJavaScript(script)) as { + cssSelector: string; + xpath: string; + } | null; + if (result?.xpath) { + clipboard.writeText(result.xpath); + } + } catch { + // page may not support elementFromPoint + } + }, + }, + ], + }, + { type: "separator" }, + { + label: "Inspect Element", + click: () => wc.inspectElement(x, y), + }, + ]; + const menu = Menu.buildFromTemplate(menuItems); + menu.popup(); + } } export const browserManager = new BrowserManager(); diff --git a/apps/desktop/src/main/lib/browser/browser-site-permission-manager.ts b/apps/desktop/src/main/lib/browser/browser-site-permission-manager.ts new file mode 100644 index 00000000000..5e5269d07f2 --- /dev/null +++ b/apps/desktop/src/main/lib/browser/browser-site-permission-manager.ts @@ -0,0 +1,335 @@ +import { EventEmitter } from "node:events"; +import { + browserSitePermissions, + type SitePermissionKind, + type SitePermissionValue, +} from "@superset/local-db"; +import { and, eq } from "drizzle-orm"; +import { session } from "electron"; +import { localDb } from "../local-db"; +import { browserManager } from "./browser-manager"; + +const APP_BROWSER_PARTITION = 
"persist:superset"; + +const DEFAULT_SITE_PERMISSIONS: Record< + SitePermissionKind, + SitePermissionValue +> = { + microphone: "ask", + camera: "ask", + geolocation: "ask", + notifications: "ask", + "clipboard-read": "ask", +}; + +interface SitePermissionRequestEvent { + paneId: string; + origin: string; + permissions: SitePermissionKind[]; +} + +function normalizeOrigin(value: string): string | null { + if (!value || value === "about:blank") { + return null; + } + + try { + const parsed = new URL(value); + if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { + return null; + } + return parsed.origin; + } catch { + return null; + } +} + +function mediaTypeToPermissionKind( + mediaType: "audio" | "video" | "unknown", +): SitePermissionKind | null { + if (mediaType === "audio") { + return "microphone"; + } + if (mediaType === "video") { + return "camera"; + } + return null; +} + +/** + * Electron's `permission` string for non-media requests. We route a + * subset through the same user-consent flow so sites don't silently + * get (or fail to get) geolocation / notifications / clipboard-read. + */ +function electronPermissionToKind( + permission: string, +): SitePermissionKind | null { + if (permission === "geolocation") return "geolocation"; + if (permission === "notifications") return "notifications"; + if (permission === "clipboard-read") return "clipboard-read"; + return null; +} + +class BrowserSitePermissionManager extends EventEmitter { + private initialized = false; + private lastRequestNotificationAt = new Map(); + + initialize(): void { + if (this.initialized) { + return; + } + + this.initialized = true; + + const browserSession = session.fromPartition(APP_BROWSER_PARTITION); + + browserSession.setPermissionCheckHandler( + (webContents, permission, requestingOrigin, details) => { + const origin = + normalizeOrigin( + (details as { securityOrigin?: string }).securityOrigin ?? "", + ) ?? + normalizeOrigin(requestingOrigin) ?? 
+ normalizeOrigin(webContents?.getURL() ?? ""); + if (!origin) return false; + + if (permission === "media") { + const permissionKind = mediaTypeToPermissionKind( + details.mediaType ?? "unknown", + ); + if (!permissionKind) return false; + return this.getPermission(origin, permissionKind) === "allow"; + } + const kind = electronPermissionToKind(permission); + if (!kind) return false; + return this.getPermission(origin, kind) === "allow"; + }, + ); + + browserSession.setPermissionRequestHandler( + (webContents, permission, callback, details) => { + const origin = + normalizeOrigin( + (details as { securityOrigin?: string }).securityOrigin ?? "", + ) ?? + normalizeOrigin(details.requestingUrl ?? "") ?? + normalizeOrigin(webContents.getURL()); + + if (permission !== "media") { + // Route non-media permissions (geolocation / + // notifications / clipboard-read) through the same + // user-consent flow as media. Permissions we don't + // recognise keep the previous permissive default so + // we don't regress sites that depend on them (e.g. + // midi, background-sync). + const kind = electronPermissionToKind(permission); + if (!kind) { + callback(true); + return; + } + if (!origin) { + callback(false); + return; + } + const stored = this.getPermission(origin, kind); + if (stored === "allow") { + callback(true); + return; + } + if (stored === "block") { + callback(false); + return; + } + const paneId = browserManager.getPaneIdForWebContents(webContents.id); + if (paneId) { + this.emitPermissionRequested({ + paneId, + origin, + permissions: [kind], + }); + } + callback(false); + return; + } + + if (!origin) { + callback(false); + return; + } + + const requestedPermissions = [ + ...new Set( + ( + (details as { mediaTypes?: ("audio" | "video" | "unknown")[] }) + .mediaTypes ?? 
[] + ) + .map((mediaType) => mediaTypeToPermissionKind(mediaType)) + .filter((value): value is SitePermissionKind => value !== null), + ), + ]; + + if (requestedPermissions.length === 0) { + callback(false); + return; + } + + const blocked = requestedPermissions.some( + (permissionKind) => + this.getPermission(origin, permissionKind) === "block", + ); + if (blocked) { + callback(false); + return; + } + + const unresolvedPermissions = requestedPermissions.filter( + (permissionKind) => + this.getPermission(origin, permissionKind) !== "allow", + ); + + if (unresolvedPermissions.length === 0) { + callback(true); + return; + } + + const paneId = browserManager.getPaneIdForWebContents(webContents.id); + if (paneId) { + this.emitPermissionRequested({ + paneId, + origin, + permissions: unresolvedPermissions, + }); + } + + callback(false); + }, + ); + } + + getPermissionsForUrl(url: string): { + origin: string; + permissions: Record; + } | null { + const origin = normalizeOrigin(url); + if (!origin) { + return null; + } + + return { + origin, + permissions: this.getPermissionsForOrigin(origin), + }; + } + + getPermissionsForOrigin( + origin: string, + ): Record { + const normalizedOrigin = normalizeOrigin(origin); + if (!normalizedOrigin) { + return { ...DEFAULT_SITE_PERMISSIONS }; + } + + const rows = localDb + .select() + .from(browserSitePermissions) + .where(eq(browserSitePermissions.origin, normalizedOrigin)) + .all(); + + const permissions = { ...DEFAULT_SITE_PERMISSIONS }; + for (const row of rows) { + permissions[row.kind] = row.value; + } + + return permissions; + } + + setPermission( + origin: string, + kind: SitePermissionKind, + value: SitePermissionValue, + ): { + origin: string; + permissions: Record; + } { + const normalizedOrigin = normalizeOrigin(origin); + if (!normalizedOrigin) { + throw new Error( + "Site settings are only available for http and https pages", + ); + } + + localDb + .insert(browserSitePermissions) + .values({ + origin: normalizedOrigin, + 
kind, + value, + createdAt: Date.now(), + updatedAt: Date.now(), + }) + .onConflictDoUpdate({ + target: [browserSitePermissions.origin, browserSitePermissions.kind], + set: { + value, + updatedAt: Date.now(), + }, + }) + .run(); + + return { + origin: normalizedOrigin, + permissions: this.getPermissionsForOrigin(normalizedOrigin), + }; + } + + resetPermissions(origin: string): void { + const normalizedOrigin = normalizeOrigin(origin); + if (!normalizedOrigin) { + throw new Error( + "Site settings are only available for http and https pages", + ); + } + + localDb + .delete(browserSitePermissions) + .where(eq(browserSitePermissions.origin, normalizedOrigin)) + .run(); + } + + private getPermission( + origin: string, + kind: SitePermissionKind, + ): SitePermissionValue { + const normalizedOrigin = normalizeOrigin(origin); + if (!normalizedOrigin) { + return "ask"; + } + + const row = localDb + .select() + .from(browserSitePermissions) + .where( + and( + eq(browserSitePermissions.origin, normalizedOrigin), + eq(browserSitePermissions.kind, kind), + ), + ) + .get(); + + return row?.value ?? "ask"; + } + + private emitPermissionRequested(event: SitePermissionRequestEvent): void { + const dedupeKey = `${event.paneId}:${event.origin}:${[...event.permissions].sort().join(",")}`; + const now = Date.now(); + const previous = this.lastRequestNotificationAt.get(dedupeKey) ?? 
0; + if (now - previous < 1500) { + return; + } + + this.lastRequestNotificationAt.set(dedupeKey, now); + this.emit(`permission-requested:${event.paneId}`, event); + } +} + +export const browserSitePermissionManager = new BrowserSitePermissionManager(); diff --git a/apps/desktop/src/main/lib/browser/browser-webview-compat.ts b/apps/desktop/src/main/lib/browser/browser-webview-compat.ts new file mode 100644 index 00000000000..28c0a31bdcf --- /dev/null +++ b/apps/desktop/src/main/lib/browser/browser-webview-compat.ts @@ -0,0 +1,31 @@ +import { join } from "node:path"; +import { session } from "electron"; + +const APP_BROWSER_PARTITION = "persist:superset"; + +// Register a session-level preload for the embedded browser partition. +// It neutralizes File System Access API entrypoints that Electron cannot +// satisfy inside guest web contents: react-dropzone (via file-selector) +// prefers DataTransferItem.getAsFileSystemHandle() on drop and then calls +// FileSystemFileHandle.getFile(), which raises +// NotAllowedError: Failed to execute 'getFile' on 'FileSystemFileHandle': +// The request is not allowed by the user agent or the platform in the +// current context. +// Returning null from getAsFileSystemHandle() makes the library fall back +// to the legacy DataTransferItem.getAsFile() / webkitGetAsEntry() path. 
+ +let initialized = false; + +export function initializeBrowserWebviewCompat(): void { + if (initialized) { + return; + } + + const preloadPath = join(__dirname, "../preload/webview-compat.js"); + const browserSession = session.fromPartition(APP_BROWSER_PARTITION); + const existing = browserSession.getPreloads(); + if (!existing.includes(preloadPath)) { + browserSession.setPreloads([...existing, preloadPath]); + } + initialized = true; +} diff --git a/apps/desktop/src/main/lib/custom-ringtones.ts b/apps/desktop/src/main/lib/custom-ringtones.ts index 83f454a6ecc..237d60c3590 100644 --- a/apps/desktop/src/main/lib/custom-ringtones.ts +++ b/apps/desktop/src/main/lib/custom-ringtones.ts @@ -20,16 +20,38 @@ import { const RINGTONES_ASSETS_DIR = join(SUPERSET_HOME_DIR, "assets", "ringtones"); const CUSTOM_RINGTONE_FILE_STEM = "notification-custom"; +const CUSTOM_RINGTONE_SOURCE_STEM = "notification-custom-source"; const CUSTOM_RINGTONE_METADATA_PATH = join( RINGTONES_ASSETS_DIR, `${CUSTOM_RINGTONE_FILE_STEM}.json`, ); const MAX_CUSTOM_RINGTONE_SIZE_BYTES = 20 * 1024 * 1024; const ALLOWED_AUDIO_EXTENSIONS = new Set([".mp3", ".wav", ".ogg"]); +const ALLOWED_SOURCE_EXTENSIONS = new Set([ + ".mp3", + ".wav", + ".ogg", + ".m4a", + ".aac", + ".opus", + ".webm", +]); + +export interface RingtoneEditState { + startSeconds: number; + endSeconds: number; + fadeInSeconds?: number; + fadeOutSeconds?: number; + playbackRate?: number; + sourceTitle?: string; + sourceUrl?: string; +} interface CustomRingtoneMetadata { name?: string; importedAt?: number; + thumbnailUrl?: string; + editState?: RingtoneEditState; } export interface CustomRingtoneInfo { @@ -37,6 +59,7 @@ export interface CustomRingtoneInfo { name: string; description: string; emoji: string; + thumbnailUrl?: string; } function isAllowedAudioExtension(filePath: string): boolean { @@ -116,12 +139,19 @@ function readCustomRingtoneMetadata(): CustomRingtoneMetadata { } } -function writeCustomRingtoneMetadata(name: string): 
void { +function writeCustomRingtoneMetadata( + name: string, + importedAt: number = Date.now(), + thumbnailUrl?: string, + editState?: RingtoneEditState, +): void { writeFileSync( CUSTOM_RINGTONE_METADATA_PATH, JSON.stringify({ name, - importedAt: Date.now(), + importedAt, + ...(thumbnailUrl ? { thumbnailUrl } : {}), + ...(editState ? { editState } : {}), }), "utf-8", ); @@ -133,6 +163,99 @@ function writeCustomRingtoneMetadata(name: string): void { } } +function getCustomRingtoneSourceFilename(): string | null { + if (!existsSync(RINGTONES_ASSETS_DIR)) return null; + const candidates = readdirSync(RINGTONES_ASSETS_DIR).filter( + (file) => + file.startsWith(`${CUSTOM_RINGTONE_SOURCE_STEM}.`) && + ALLOWED_SOURCE_EXTENSIONS.has(extname(file).toLowerCase()), + ); + if (candidates.length === 0) return null; + candidates.sort((a, b) => { + const am = statSync(join(RINGTONES_ASSETS_DIR, a)).mtimeMs; + const bm = statSync(join(RINGTONES_ASSETS_DIR, b)).mtimeMs; + return bm - am; + }); + return candidates[0] ?? null; +} + +export function getCustomRingtoneSourcePath(): string | null { + const name = getCustomRingtoneSourceFilename(); + return name ? join(RINGTONES_ASSETS_DIR, name) : null; +} + +function removeExistingSourceFiles(): void { + if (!existsSync(RINGTONES_ASSETS_DIR)) return; + for (const file of readdirSync(RINGTONES_ASSETS_DIR)) { + if (file.startsWith(`${CUSTOM_RINGTONE_SOURCE_STEM}.`)) { + try { + unlinkSync(join(RINGTONES_ASSETS_DIR, file)); + } catch { + // Best effort. + } + } + } +} + +export async function saveCustomRingtoneSource( + sourcePath: string, +): Promise { + ensureCustomRingtonesDir(); + const ext = extname(sourcePath).toLowerCase(); + const dest = join( + RINGTONES_ASSETS_DIR, + `${CUSTOM_RINGTONE_SOURCE_STEM}${ext}`, + ); + if (areSamePath(sourcePath, dest) && existsSync(dest)) { + try { + chmodSync(dest, SUPERSET_SENSITIVE_FILE_MODE); + } catch { + // Best effort. 
+ } + return dest; + } + const tempPath = join( + RINGTONES_ASSETS_DIR, + `.tmp-${CUSTOM_RINGTONE_SOURCE_STEM}-${randomUUID()}${ext}`, + ); + try { + await copyFile(sourcePath, tempPath); + removeExistingSourceFiles(); + await rename(tempPath, dest); + } catch (error) { + if (existsSync(tempPath)) { + try { + await unlink(tempPath); + } catch { + // Best effort cleanup only. + } + } + throw error; + } + try { + chmodSync(dest, SUPERSET_SENSITIVE_FILE_MODE); + } catch { + // Best effort. + } + return dest; +} + +export function getCustomRingtoneEditState(): RingtoneEditState | null { + return readCustomRingtoneMetadata().editState ?? null; +} + +export function updateCustomRingtoneEditState( + editState: RingtoneEditState, +): void { + const existing = readCustomRingtoneMetadata(); + writeCustomRingtoneMetadata( + existing.name ?? "Custom Audio", + existing.importedAt ?? Date.now(), + existing.thumbnailUrl, + editState, + ); +} + export function ensureCustomRingtonesDir(): void { if (!existsSync(RINGTONES_ASSETS_DIR)) { mkdirSync(RINGTONES_ASSETS_DIR, { @@ -160,6 +283,35 @@ export function getCustomRingtonePath(): string | null { return join(RINGTONES_ASSETS_DIR, filename); } +export function deleteCustomRingtone(): void { + if (!existsSync(RINGTONES_ASSETS_DIR)) { + return; + } + removeExistingCustomRingtoneFiles(); + removeExistingSourceFiles(); + if (existsSync(CUSTOM_RINGTONE_METADATA_PATH)) { + try { + unlinkSync(CUSTOM_RINGTONE_METADATA_PATH); + } catch { + // Best effort. + } + } +} + +export function setCustomRingtoneDisplayName(name: string): void { + if (!hasCustomRingtone()) { + return; + } + ensureCustomRingtonesDir(); + const displayName = name.trim().slice(0, 80) || "Custom Audio"; + const existing = readCustomRingtoneMetadata(); + writeCustomRingtoneMetadata( + displayName, + existing.importedAt ?? 
Date.now(), + existing.thumbnailUrl, + ); +} + export function getCustomRingtoneInfo(): CustomRingtoneInfo | null { if (!hasCustomRingtone()) { return null; @@ -172,11 +324,18 @@ export function getCustomRingtoneInfo(): CustomRingtoneInfo | null { name: metadata.name?.trim() || "Custom Audio", description: "Imported from your local machine", emoji: "SFX", + ...(metadata.thumbnailUrl ? { thumbnailUrl: metadata.thumbnailUrl } : {}), }; } +export interface ImportOptions { + displayName?: string; + thumbnailUrl?: string; +} + export async function importCustomRingtoneFromPath( sourcePath: string, + options?: ImportOptions, ): Promise { if (!isAllowedAudioExtension(sourcePath)) { throw new Error("Only .mp3, .wav, and .ogg files are supported"); @@ -200,7 +359,10 @@ export async function importCustomRingtoneFromPath( RINGTONES_ASSETS_DIR, `${CUSTOM_RINGTONE_FILE_STEM}${ext}`, ); - const displayName = sanitizeDisplayName(basename(sourcePath)); + const displayName = + options?.displayName?.trim().slice(0, 80) || + sanitizeDisplayName(basename(sourcePath)); + const thumbnailUrl = options?.thumbnailUrl; // Re-importing the same file path should not delete the active ringtone. if (areSamePath(sourcePath, destinationPath) && existsSync(destinationPath)) { @@ -209,12 +371,13 @@ export async function importCustomRingtoneFromPath( } catch { // Best effort only. } - writeCustomRingtoneMetadata(displayName); + writeCustomRingtoneMetadata(displayName, Date.now(), thumbnailUrl); return { id: CUSTOM_RINGTONE_ID, name: displayName, description: "Imported from your local machine", emoji: "SFX", + ...(thumbnailUrl ? { thumbnailUrl } : {}), }; } @@ -245,12 +408,13 @@ export async function importCustomRingtoneFromPath( // Best effort only. } - writeCustomRingtoneMetadata(displayName); + writeCustomRingtoneMetadata(displayName, Date.now(), thumbnailUrl); return { id: CUSTOM_RINGTONE_ID, name: displayName, description: "Imported from your local machine", emoji: "SFX", + ...(thumbnailUrl ? 
{ thumbnailUrl } : {}), }; } diff --git a/apps/desktop/src/main/lib/debug-channel.ts b/apps/desktop/src/main/lib/debug-channel.ts new file mode 100644 index 00000000000..4edf178400e --- /dev/null +++ b/apps/desktop/src/main/lib/debug-channel.ts @@ -0,0 +1,76 @@ +import type { + DebugChannelOptions, + DebugChannelTransport, +} from "shared/debug-channel"; +import { createDebugChannel } from "shared/debug-channel"; + +let sentryModulePromise: Promise< + typeof import("@sentry/electron/main") +> | null = null; + +function getSentry() { + if (!sentryModulePromise) { + sentryModulePromise = import("@sentry/electron/main"); + } + return sentryModulePromise; +} + +function createMainTransport(): DebugChannelTransport { + return { + addBreadcrumb(entry) { + void getSentry() + .then((Sentry) => { + Sentry.addBreadcrumb({ + category: entry.namespace, + level: entry.level, + message: entry.message, + data: entry.data, + }); + }) + .catch(() => {}); + }, + captureMessage(entry) { + void getSentry() + .then((Sentry) => { + Sentry.withScope((scope) => { + scope.setLevel(entry.level); + scope.setTag("debug_namespace", entry.namespace); + if (entry.fingerprint) { + scope.setFingerprint(entry.fingerprint); + } + if (entry.data) { + scope.setContext("debug", entry.data); + } + Sentry.captureMessage(`[${entry.namespace}] ${entry.message}`); + }); + }) + .catch(() => {}); + }, + captureException(error, entry) { + void getSentry() + .then((Sentry) => { + Sentry.withScope((scope) => { + scope.setLevel(entry.level); + scope.setTag("debug_namespace", entry.namespace); + if (entry.fingerprint) { + scope.setFingerprint(entry.fingerprint); + } + if (entry.data) { + scope.setContext("debug", entry.data); + } + Sentry.captureException(error); + }); + }) + .catch(() => {}); + }, + }; +} + +export function createMainDebugChannel( + options: Omit, +) { + return createDebugChannel({ + ...options, + transport: createMainTransport(), + }); +} diff --git 
a/apps/desktop/src/main/lib/extensions/compatibility-checker.ts b/apps/desktop/src/main/lib/extensions/compatibility-checker.ts new file mode 100644 index 00000000000..e8ddb02111a --- /dev/null +++ b/apps/desktop/src/main/lib/extensions/compatibility-checker.ts @@ -0,0 +1,262 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { glob } from "fast-glob"; +import type { ChromeManifest } from "./crx-downloader"; + +/** APIs fully supported in Electron */ +const _SUPPORTED_APIS = new Set([ + "chrome.devtools.inspectedWindow", + "chrome.devtools.network", + "chrome.devtools.panels", + "chrome.scripting", + "chrome.webRequest", + "chrome.storage.local", + "chrome.runtime.lastError", + "chrome.runtime.id", + "chrome.runtime.getManifest", + "chrome.runtime.getURL", + "chrome.runtime.connect", + "chrome.runtime.sendMessage", + "chrome.runtime.onConnect", + "chrome.runtime.onMessage", + "chrome.runtime.onInstalled", + "chrome.runtime.onStartup", + "chrome.extension.getURL", + "chrome.extension.getBackgroundPage", +]); + +/** Permissions that Electron cannot provide */ +const UNSUPPORTED_PERMISSIONS = new Set([ + "bookmarks", + "browsingData", + "contentSettings", + "cookies", + "debugger", + "declarativeContent", + "declarativeNetRequest", + "desktopCapture", + "downloads", + "downloads.shelf", + "enterprise.deviceAttributes", + "enterprise.platformKeys", + "fontSettings", + "gcm", + "geolocation", + "history", + "identity", + "idle", + "loginState", + "nativeMessaging", + "notifications", + "pageCapture", + "platformKeys", + "power", + "printerProvider", + "printing", + "printingMetrics", + "privacy", + "proxy", + "search", + "sessions", + "signedInDevices", + "system.cpu", + "system.display", + "system.memory", + "system.storage", + "tabCapture", + "tabGroups", + "topSites", + "tts", + "ttsEngine", + "wallpaper", + "webNavigation", +]); + +/** chrome.* API patterns that don't work in Electron */ +const UNSUPPORTED_API_PATTERNS = [ + 
"chrome.bookmarks", + "chrome.browsingData", + "chrome.contentSettings", + "chrome.cookies", + "chrome.debugger", + "chrome.declarativeContent", + "chrome.declarativeNetRequest", + "chrome.desktopCapture", + "chrome.downloads", + "chrome.fontSettings", + "chrome.gcm", + "chrome.history", + "chrome.identity", + "chrome.notifications", + "chrome.pageCapture", + "chrome.privacy", + "chrome.proxy", + "chrome.sessions", + "chrome.tabCapture", + "chrome.tabGroups", + "chrome.topSites", + "chrome.tts", + "chrome.ttsEngine", + "chrome.webNavigation", + "chrome.storage.sync", + "chrome.storage.managed", + "chrome.tabs.create", + "chrome.tabs.remove", + "chrome.tabs.move", + "chrome.tabs.group", + "chrome.tabs.ungroup", + "chrome.tabs.duplicate", + "chrome.tabs.discard", + "chrome.tabs.captureVisibleTab", + "chrome.tabs.goBack", + "chrome.tabs.goForward", + "chrome.windows.create", + "chrome.windows.remove", + "chrome.windows.update", +]; + +export type CompatibilityLevel = "full" | "partial" | "low"; + +export interface CompatibilityIssue { + type: "unsupported_permission" | "unsupported_api" | "unsupported_feature"; + severity: "warning" | "error"; + message: string; + detail?: string; +} + +export interface CompatibilityReport { + level: CompatibilityLevel; + issues: CompatibilityIssue[]; + summary: string; +} + +/** + * Check extension manifest for unsupported features. + */ +function checkManifest(manifest: ChromeManifest): CompatibilityIssue[] { + const issues: CompatibilityIssue[] = []; + + // Check permissions + const allPermissions = [ + ...(manifest.permissions ?? []), + ...(manifest.optional_permissions ?? 
[]), + ]; + + for (const perm of allPermissions) { + if (UNSUPPORTED_PERMISSIONS.has(perm)) { + issues.push({ + type: "unsupported_permission", + severity: "warning", + message: `Permission "${perm}" is not supported in Electron`, + }); + } + } + + // Check chrome_url_overrides + if (manifest.chrome_url_overrides) { + issues.push({ + type: "unsupported_feature", + severity: "error", + message: + "Chrome URL overrides (new tab, history, bookmarks pages) are not supported", + }); + } + + // Check options_ui + if (manifest.options_ui || manifest.options_page) { + issues.push({ + type: "unsupported_feature", + severity: "warning", + message: "Options page may not work as expected", + detail: + "Extension options pages rely on chrome.runtime.openOptionsPage() which has limited support", + }); + } + + return issues; +} + +/** + * Scan the extension's JS files for usage of unsupported chrome.* APIs. + */ +async function scanJsForUnsupportedApis( + extensionDir: string, +): Promise { + const issues: CompatibilityIssue[] = []; + const seen = new Set(); + + const jsFiles = await glob("**/*.js", { + cwd: extensionDir, + absolute: true, + ignore: ["**/node_modules/**"], + }); + + for (const file of jsFiles) { + let content: string; + try { + content = await readFile(file, "utf-8"); + } catch { + continue; + } + + for (const api of UNSUPPORTED_API_PATTERNS) { + if (seen.has(api)) continue; + + // Escape dots for regex, match the API call pattern + const pattern = api.replace(/\./g, "\\."); + const regex = new RegExp(`${pattern}\\b`); + + if (regex.test(content)) { + seen.add(api); + issues.push({ + type: "unsupported_api", + severity: "warning", + message: `Uses "${api}" which is not supported in Electron`, + detail: `Found in ${path.basename(file)}`, + }); + } + } + } + + return issues; +} + +/** + * Run a full compatibility check on an unpacked extension. 
+ */ +export async function checkCompatibility( + extensionDir: string, + manifest: ChromeManifest, +): Promise { + const manifestIssues = checkManifest(manifest); + const apiIssues = await scanJsForUnsupportedApis(extensionDir); + + const issues = [...manifestIssues, ...apiIssues]; + + const errorCount = issues.filter((i) => i.severity === "error").length; + const warningCount = issues.filter((i) => i.severity === "warning").length; + + let level: CompatibilityLevel; + if (errorCount > 0 || warningCount >= 5) { + level = "low"; + } else if (warningCount > 0) { + level = "partial"; + } else { + level = "full"; + } + + let summary: string; + switch (level) { + case "full": + summary = "This extension is expected to work well in Electron."; + break; + case "partial": + summary = `This extension may have limited functionality (${warningCount} potential issue${warningCount > 1 ? "s" : ""}).`; + break; + case "low": + summary = `This extension is likely incompatible (${errorCount} critical, ${warningCount} warning${warningCount > 1 ? "s" : ""}).`; + break; + } + + return { level, issues, summary }; +} diff --git a/apps/desktop/src/main/lib/extensions/crx-downloader.ts b/apps/desktop/src/main/lib/extensions/crx-downloader.ts new file mode 100644 index 00000000000..93fee16c671 --- /dev/null +++ b/apps/desktop/src/main/lib/extensions/crx-downloader.ts @@ -0,0 +1,254 @@ +import { existsSync, mkdirSync } from "node:fs"; +import { mkdir, readFile, rm, writeFile } from "node:fs/promises"; +import os from "node:os"; +import path from "node:path"; +import { app, net } from "electron"; +import JSZip from "jszip"; + +/** Electron version string used in the CRX download URL */ +const ELECTRON_VERSION = process.versions.chrome ?? "130.0.0.0"; + +const CRX_DOWNLOAD_URL = + "https://clients2.google.com/service/update2/crx?response=redirect&prodversion=VERSION&acceptformat=crx2,crx3&x=id%3DID%26uc"; + +/** + * Parse a Chrome Web Store URL or raw extension ID into just the extension ID. 
+ * + * Accepts: + * - Full URL: https://chromewebstore.google.com/detail/some-name/abcdefghijklmnopabcdefghijklmnop + * - Short URL: https://chrome.google.com/webstore/detail/abcdefghijklmnopabcdefghijklmnop + * - Raw 32-char extension ID: abcdefghijklmnopabcdefghijklmnop + */ +export function parseExtensionId(input: string): string | null { + const trimmed = input.trim(); + + // Raw extension ID (32 lowercase alpha chars) + if (/^[a-p]{32}$/.test(trimmed)) return trimmed; + + try { + const url = new URL(trimmed); + // New Chrome Web Store: /detail// or /detail/ + const segments = url.pathname.split("/").filter(Boolean); + for (const seg of segments) { + if (/^[a-p]{32}$/.test(seg)) return seg; + } + } catch { + // Not a URL + } + + return null; +} + +/** + * Build the CRX download URL from an extension ID. + */ +function buildCrxUrl(extensionId: string): string { + return CRX_DOWNLOAD_URL.replace("VERSION", ELECTRON_VERSION).replace( + "ID", + extensionId, + ); +} + +/** + * Get the root directory where user-installed extensions are stored. + */ +export function getExtensionsDir(): string { + return path.join(app.getPath("userData"), "extensions"); +} + +/** + * Download a CRX file from Google's update servers. + * Returns the path to the downloaded CRX file. 
+ */ +async function downloadCrx(extensionId: string): Promise { + const tmpDir = path.join(os.tmpdir(), `superset-crx-${extensionId}`); + if (!existsSync(tmpDir)) mkdirSync(tmpDir, { recursive: true }); + + const crxPath = path.join(tmpDir, `${extensionId}.crx`); + const url = buildCrxUrl(extensionId); + + const response = await net.fetch(url, { redirect: "follow" }); + if (!response.ok) { + throw new Error( + `Failed to download extension ${extensionId}: HTTP ${response.status}`, + ); + } + + const body = response.body; + if (!body) throw new Error("Empty response body"); + + const arrayBuffer = await response.arrayBuffer(); + await writeFile(crxPath, Buffer.from(arrayBuffer)); + + return crxPath; +} + +/** + * Strip the CRX header and extract the ZIP payload. + * + * CRX3 format: + * [4 bytes] "Cr24" magic number + * [4 bytes] CRX version (3) + * [4 bytes] header length + * [header_length bytes] protobuf header + * [rest] ZIP data + * + * CRX2 format: + * [4 bytes] "Cr24" magic number + * [4 bytes] CRX version (2) + * [4 bytes] public key length + * [4 bytes] signature length + * [public_key_length bytes] public key + * [signature_length bytes] signature + * [rest] ZIP data + */ +function extractZipFromCrx(crxBuffer: Buffer): Buffer { + const magic = crxBuffer.toString("ascii", 0, 4); + if (magic !== "Cr24") { + // Maybe it's already a ZIP + if (crxBuffer[0] === 0x50 && crxBuffer[1] === 0x4b) { + return crxBuffer; + } + throw new Error(`Invalid CRX file: unexpected magic "${magic}"`); + } + + const version = crxBuffer.readUInt32LE(4); + + if (version === 3) { + const headerLength = crxBuffer.readUInt32LE(8); + const zipStart = 12 + headerLength; + return crxBuffer.subarray(zipStart); + } + + if (version === 2) { + const pubKeyLength = crxBuffer.readUInt32LE(8); + const sigLength = crxBuffer.readUInt32LE(12); + const zipStart = 16 + pubKeyLength + sigLength; + return crxBuffer.subarray(zipStart); + } + + throw new Error(`Unsupported CRX version: ${version}`); +} 
+ +/** + * Unpack a ZIP buffer into the target directory. + */ +async function unpackZip(zipBuffer: Buffer, targetDir: string): Promise { + const zip = await JSZip.loadAsync(zipBuffer); + + await mkdir(targetDir, { recursive: true }); + + const entries = Object.entries(zip.files); + for (const [relativePath, file] of entries) { + const fullPath = path.join(targetDir, relativePath); + + if (file.dir) { + await mkdir(fullPath, { recursive: true }); + continue; + } + + // Ensure parent directory exists + await mkdir(path.dirname(fullPath), { recursive: true }); + + const content = await file.async("nodebuffer"); + await writeFile(fullPath, content); + } +} + +export interface CrxDownloadResult { + extensionId: string; + extensionDir: string; + manifest: ChromeManifest; +} + +export interface ChromeManifest { + manifest_version: number; + name: string; + version: string; + description?: string; + permissions?: string[]; + optional_permissions?: string[]; + host_permissions?: string[]; + background?: { + service_worker?: string; + scripts?: string[]; + page?: string; + }; + content_scripts?: Array<{ + matches: string[]; + js?: string[]; + css?: string[]; + run_at?: string; + }>; + action?: { + default_popup?: string; + default_icon?: string | Record; + default_title?: string; + }; + browser_action?: { + default_popup?: string; + default_icon?: string | Record; + default_title?: string; + }; + icons?: Record; + devtools_page?: string; + chrome_url_overrides?: Record; + options_ui?: { page: string; open_in_tab?: boolean }; + options_page?: string; +} + +/** + * Download and install an extension from the Chrome Web Store. + * + * 1. Download the CRX + * 2. Strip the CRX header to get the ZIP + * 3. Extract into userData/extensions/ + * 4. 
Return the extracted manifest + */ +export async function downloadAndExtractExtension( + extensionId: string, +): Promise { + const extensionsRoot = getExtensionsDir(); + const extensionDir = path.join(extensionsRoot, extensionId); + + // Clean up any previous install + if (existsSync(extensionDir)) { + await rm(extensionDir, { recursive: true, force: true }); + } + + let crxPath: string | null = null; + try { + // Download + crxPath = await downloadCrx(extensionId); + + // Extract ZIP from CRX + const crxBuffer = await readFile(crxPath); + const zipBuffer = extractZipFromCrx(crxBuffer); + + // Unpack + await unpackZip(zipBuffer, extensionDir); + + // Read manifest + const manifestPath = path.join(extensionDir, "manifest.json"); + if (!existsSync(manifestPath)) { + throw new Error("Extension does not contain a manifest.json"); + } + const manifest: ChromeManifest = JSON.parse( + await readFile(manifestPath, "utf-8"), + ); + + return { extensionId, extensionDir, manifest }; + } catch (error) { + // Clean up on failure + if (existsSync(extensionDir)) { + await rm(extensionDir, { recursive: true, force: true }).catch(() => {}); + } + throw error; + } finally { + // Clean up temp CRX + if (crxPath) { + const tmpDir = path.dirname(crxPath); + await rm(tmpDir, { recursive: true, force: true }).catch(() => {}); + } + } +} diff --git a/apps/desktop/src/main/lib/extensions/extension-icon-protocol.ts b/apps/desktop/src/main/lib/extensions/extension-icon-protocol.ts new file mode 100644 index 00000000000..54d163c995d --- /dev/null +++ b/apps/desktop/src/main/lib/extensions/extension-icon-protocol.ts @@ -0,0 +1,89 @@ +import { existsSync } from "node:fs"; +import path from "node:path"; +import { pathToFileURL } from "node:url"; +import { net } from "electron"; +import type { ChromeManifest } from "./crx-downloader"; +import { getExtensionsDir } from "./crx-downloader"; + +/** + * Resolve the best icon file path from a manifest's action or icons field. 
+ * + * Tries `action.default_icon` first (string or size map), then falls back + * to `manifest.icons`. Returns the absolute path to the icon file, or null. + */ +function resolveIconFile( + extensionDir: string, + manifest: ChromeManifest, + requestedSize: number, +): string | null { + const action = manifest.action ?? manifest.browser_action; + const iconSource = action?.default_icon ?? manifest.icons; + + if (!iconSource) return null; + + // Single string path + if (typeof iconSource === "string") { + const fullPath = path.join(extensionDir, iconSource); + return existsSync(fullPath) ? fullPath : null; + } + + // Record – find closest size + const sizes = Object.keys(iconSource) + .map(Number) + .filter(Number.isFinite) + .sort((a, b) => a - b); + + if (sizes.length === 0) return null; + + // Pick the smallest size >= requestedSize, or the largest available + const bestSize = + sizes.find((s) => s >= requestedSize) ?? sizes[sizes.length - 1]; + + const iconRelPath = iconSource[String(bestSize)]; + if (!iconRelPath) return null; + + const fullPath = path.join(extensionDir, iconRelPath); + return existsSync(fullPath) ? fullPath : null; +} + +/** + * Create a protocol handler that serves extension icon images. + * + * URL format: `superset-ext-icon://{extensionId}/{size}` + * e.g. `superset-ext-icon://abcdefghijklmnopabcdefghijklmnop/32` + * + * The handler reads the extension's manifest.json to locate the best + * matching icon file and returns it via `net.fetch`. 
+ */ +export function createExtensionIconProtocolHandler(): ( + request: Request, +) => Response | Promise { + return async (request: Request) => { + try { + const url = new URL(request.url); + const extensionId = url.hostname; + const size = Number.parseInt(url.pathname.replace(/^\//, ""), 10) || 32; + + const extensionDir = path.join(getExtensionsDir(), extensionId); + const manifestPath = path.join(extensionDir, "manifest.json"); + + if (!existsSync(manifestPath)) { + return new Response("Extension not found", { status: 404 }); + } + + const { readFile } = await import("node:fs/promises"); + const manifest: ChromeManifest = JSON.parse( + await readFile(manifestPath, "utf-8"), + ); + + const iconPath = resolveIconFile(extensionDir, manifest, size); + if (!iconPath) { + return new Response("Icon not found", { status: 404 }); + } + + return net.fetch(pathToFileURL(iconPath).toString()); + } catch { + return new Response("Internal error", { status: 500 }); + } + }; +} diff --git a/apps/desktop/src/main/lib/extensions/extension-manager.ts b/apps/desktop/src/main/lib/extensions/extension-manager.ts new file mode 100644 index 00000000000..f55b25aa6ff --- /dev/null +++ b/apps/desktop/src/main/lib/extensions/extension-manager.ts @@ -0,0 +1,339 @@ +import { existsSync } from "node:fs"; +import { readFile, rm, writeFile } from "node:fs/promises"; +import path from "node:path"; +import { app, session } from "electron"; +import type { CompatibilityReport } from "./compatibility-checker"; +import { checkCompatibility } from "./compatibility-checker"; +import { + type ChromeManifest, + downloadAndExtractExtension, + getExtensionsDir, + parseExtensionId, +} from "./crx-downloader"; + +const APP_PARTITION = "persist:superset"; + +export interface InstalledExtension { + id: string; + /** Extension ID assigned by Electron (derived from path, may differ from Chrome Web Store ID) */ + electronId?: string; + name: string; + version: string; + description: string; + enabled: boolean; 
+ installedAt: string; + compatibility: CompatibilityReport; + iconPath?: string; +} + +interface ExtensionStore { + extensions: InstalledExtension[]; +} + +function getStorePath(): string { + return path.join(app.getPath("userData"), "extension-store.json"); +} + +async function readStore(): Promise { + const storePath = getStorePath(); + try { + const data = await readFile(storePath, "utf-8"); + return JSON.parse(data) as ExtensionStore; + } catch { + return { extensions: [] }; + } +} + +async function writeStore(store: ExtensionStore): Promise { + const storePath = getStorePath(); + await writeFile(storePath, JSON.stringify(store, null, 2), "utf-8"); +} + +/** + * Resolve the best icon path from the manifest icons object. + */ +function resolveIconPath( + manifest: ChromeManifest, + extensionDir: string, +): string | undefined { + if (!manifest.icons) return undefined; + + const sizes = Object.keys(manifest.icons) + .map(Number) + .sort((a, b) => b - a); + + for (const size of sizes) { + const iconRelPath = manifest.icons[String(size)]; + if (iconRelPath) { + const fullPath = path.join(extensionDir, iconRelPath); + if (existsSync(fullPath)) return fullPath; + } + } + + return undefined; +} + +/** + * Load all enabled extensions into the Electron session. + * Called at app startup. 
+ */ +export async function loadInstalledExtensions(): Promise { + const store = await readStore(); + const ses = session.fromPartition(APP_PARTITION); + let storeUpdated = false; + + for (const ext of store.extensions) { + if (!ext.enabled) continue; + + const extensionDir = path.join(getExtensionsDir(), ext.id); + if (!existsSync(path.join(extensionDir, "manifest.json"))) { + console.warn( + `[extensions] Extension ${ext.id} (${ext.name}) directory missing, skipping`, + ); + continue; + } + + try { + const loaded = await ses.extensions.loadExtension(extensionDir); + // Persist the Electron-assigned ID (may differ from Chrome Web Store ID) + if (loaded.id !== ext.electronId) { + ext.electronId = loaded.id; + storeUpdated = true; + } + console.log( + `[extensions] Loaded: ${ext.name} v${ext.version} (electronId=${loaded.id})`, + ); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + if (message.includes("already loaded")) continue; + console.error(`[extensions] Failed to load ${ext.name}:`, error); + } + } + + if (storeUpdated) { + await writeStore(store); + } +} + +/** + * Install an extension from the Chrome Web Store. + */ +export async function installExtension( + input: string, +): Promise { + const extensionId = parseExtensionId(input); + if (!extensionId) { + throw new Error( + "Invalid input. 
Please provide a Chrome Web Store URL or extension ID.", + ); + } + + // Check if already installed + const store = await readStore(); + const existing = store.extensions.find((e) => e.id === extensionId); + if (existing) { + throw new Error(`Extension "${existing.name}" is already installed.`); + } + + // Download and extract + const result = await downloadAndExtractExtension(extensionId); + + // Run compatibility check + const compatibility = await checkCompatibility( + result.extensionDir, + result.manifest, + ); + + const iconPath = resolveIconPath(result.manifest, result.extensionDir); + + const installed: InstalledExtension = { + id: extensionId, + name: result.manifest.name, + version: result.manifest.version, + description: result.manifest.description ?? "", + enabled: true, + installedAt: new Date().toISOString(), + compatibility, + iconPath, + }; + + // Load into session and capture Electron-assigned ID + const ses = session.fromPartition(APP_PARTITION); + try { + const loaded = await ses.extensions.loadExtension(result.extensionDir); + installed.electronId = loaded.id; + console.log( + `[extensions] Installed and loaded: ${installed.name} v${installed.version} (electronId=${loaded.id})`, + ); + } catch (error) { + console.error( + `[extensions] Installed but failed to load ${installed.name}:`, + error, + ); + installed.enabled = false; + } + + // Persist + store.extensions.push(installed); + await writeStore(store); + + return installed; +} + +/** + * Uninstall an extension. 
+ */ +export async function uninstallExtension(extensionId: string): Promise { + const store = await readStore(); + const idx = store.extensions.findIndex((e) => e.id === extensionId); + if (idx === -1) { + throw new Error("Extension not found."); + } + + const ext = store.extensions[idx]; + + // Unload from session (try both IDs) + const ses = session.fromPartition(APP_PARTITION); + for (const id of [ext.electronId, ext.id]) { + if (!id) continue; + try { + ses.extensions.removeExtension(id); + break; + } catch { + // May not be loaded with this ID + } + } + + // Remove files + const extensionDir = path.join(getExtensionsDir(), extensionId); + if (existsSync(extensionDir)) { + await rm(extensionDir, { recursive: true, force: true }); + } + + // Update store + store.extensions.splice(idx, 1); + await writeStore(store); + + console.log(`[extensions] Uninstalled: ${extensionId}`); +} + +/** + * Toggle an extension's enabled state. + */ +export async function toggleExtension( + extensionId: string, + enabled: boolean, +): Promise { + const store = await readStore(); + const ext = store.extensions.find((e) => e.id === extensionId); + if (!ext) { + throw new Error("Extension not found."); + } + + const ses = session.fromPartition(APP_PARTITION); + + if (enabled) { + const extensionDir = path.join(getExtensionsDir(), extensionId); + if (!existsSync(path.join(extensionDir, "manifest.json"))) { + throw new Error("Extension files are missing. Please reinstall."); + } + try { + const loaded = await ses.extensions.loadExtension(extensionDir); + ext.electronId = loaded.id; + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + if (!message.includes("already loaded")) { + throw new Error(`Failed to enable extension: ${message}`); + } + } + } else { + for (const id of [ext.electronId, ext.id]) { + if (!id) continue; + try { + ses.extensions.removeExtension(id); + break; + } catch { + // Already unloaded or wrong ID + } + } + } + + ext.enabled = enabled; + await writeStore(store); + + return ext; +} + +/** + * List all installed extensions. + */ +export async function listExtensions(): Promise { + const store = await readStore(); + return store.extensions; +} + +export interface ExtensionToolbarInfo { + id: string; + /** Electron-assigned extension ID (used for chrome-extension:// URLs) */ + electronId: string; + name: string; + enabled: boolean; + hasPopup: boolean; + popupPath: string | null; + actionTitle: string | null; +} + +/** + * Get toolbar-relevant info for all enabled extensions that have a popup action. + */ +export async function getExtensionsWithToolbarInfo(): Promise< + ExtensionToolbarInfo[] +> { + const store = await readStore(); + const ses = session.fromPartition(APP_PARTITION); + const results: ExtensionToolbarInfo[] = []; + + for (const ext of store.extensions) { + if (!ext.enabled) continue; + + const extensionDir = path.join(getExtensionsDir(), ext.id); + const manifestPath = path.join(extensionDir, "manifest.json"); + + if (!existsSync(manifestPath)) continue; + + let manifest: ChromeManifest; + try { + const data = await readFile(manifestPath, "utf-8"); + manifest = JSON.parse(data) as ChromeManifest; + } catch { + continue; + } + + const action = manifest.action ?? manifest.browser_action; + const hasPopup = !!action?.default_popup; + + if (!hasPopup) continue; + + // Resolve the Electron-assigned ID. + // If not cached, look it up from the session's loaded extensions. 
+ let electronId = ext.electronId; + if (!electronId) { + const loaded = ses.extensions + .getAllExtensions() + .find((e) => e.path === extensionDir || e.name === ext.name); + electronId = loaded?.id ?? ext.id; + } + + results.push({ + id: ext.id, + electronId, + name: ext.name, + enabled: ext.enabled, + hasPopup, + popupPath: action?.default_popup ?? null, + actionTitle: action?.default_title ?? null, + }); + } + + return results; +} diff --git a/apps/desktop/src/main/lib/extensions/extension-popup-manager.ts b/apps/desktop/src/main/lib/extensions/extension-popup-manager.ts new file mode 100644 index 00000000000..6f26e6051f7 --- /dev/null +++ b/apps/desktop/src/main/lib/extensions/extension-popup-manager.ts @@ -0,0 +1,212 @@ +import path from "node:path"; +import { BrowserWindow, nativeTheme, screen, session } from "electron"; +import { getExtensionsDir } from "./crx-downloader"; + +const APP_PARTITION = "persist:superset"; + +/** Max popup dimensions */ +const MAX_WIDTH = 800; +const MAX_HEIGHT = 600; +const MIN_SIZE = 25; + +/** Gap between anchor icon and popup */ +const ANCHOR_GAP = 4; + +interface AnchorRect { + x: number; + y: number; + width: number; + height: number; +} + +/** + * Manages the lifecycle of extension popup BrowserWindows. + * + * Only one popup can be open at a time. Opening a new popup automatically + * closes the previous one. + */ +export class ExtensionPopupManager { + private currentPopup: BrowserWindow | null = null; + + /** + * Open an extension popup window anchored below a toolbar icon. + * + * @param parentWindow The main BrowserWindow (used as parent) + * @param extensionId Extension ID for the chrome-extension:// URL + * @param popupPath Relative path to the popup HTML (e.g. 
"popup.html") + * @param anchorRect Bounding rect of the icon *relative to the parent window content area* + */ + openPopup( + parentWindow: BrowserWindow, + extensionId: string, + popupPath: string, + anchorRect: AnchorRect, + ): void { + // Close any existing popup + this.closePopup(); + + // Convert content-relative coordinates to screen coordinates + const contentBounds = parentWindow.getContentBounds(); + + const screenAnchor = { + x: contentBounds.x + anchorRect.x, + y: contentBounds.y + anchorRect.y, + width: anchorRect.width, + height: anchorRect.height, + }; + + // Initial position: centered below the anchor + const initialWidth = 350; + const initialHeight = 400; + let popupX = + screenAnchor.x + + Math.round(screenAnchor.width / 2) - + Math.round(initialWidth / 2); + let popupY = screenAnchor.y + screenAnchor.height + ANCHOR_GAP; + + // Clamp to the display bounds + const display = screen.getDisplayNearestPoint({ + x: screenAnchor.x, + y: screenAnchor.y, + }); + const workArea = display.workArea; + + if (popupX + initialWidth > workArea.x + workArea.width) { + popupX = workArea.x + workArea.width - initialWidth; + } + if (popupX < workArea.x) { + popupX = workArea.x; + } + + // If not enough space below, show above the anchor + if (popupY + initialHeight > workArea.y + workArea.height) { + popupY = screenAnchor.y - initialHeight - ANCHOR_GAP; + } + if (popupY < workArea.y) { + popupY = workArea.y; + } + + const popup = new BrowserWindow({ + parent: parentWindow, + modal: false, + show: false, + frame: false, + transparent: false, + backgroundColor: nativeTheme.shouldUseDarkColors ? 
"#252525" : "#ffffff", + resizable: false, + movable: false, + minimizable: false, + maximizable: false, + fullscreenable: false, + skipTaskbar: true, + width: initialWidth, + height: initialHeight, + x: popupX, + y: popupY, + webPreferences: { + session: session.fromPartition(APP_PARTITION), + nodeIntegration: false, + contextIsolation: true, + // sandbox must be false — sandboxed renderers cannot load + // chrome-extension:// URLs (ERR_BLOCKED_BY_CLIENT) + sandbox: false, + enablePreferredSizeMode: true, + }, + }); + + this.currentPopup = popup; + + // Auto-resize when popup content changes size + popup.webContents.on("preferred-size-changed", (_event, preferredSize) => { + if (popup.isDestroyed()) return; + + const width = Math.min( + MAX_WIDTH, + Math.max(MIN_SIZE, preferredSize.width), + ); + const height = Math.min( + MAX_HEIGHT, + Math.max(MIN_SIZE, preferredSize.height), + ); + + // Re-center horizontally relative to anchor + let newX = + screenAnchor.x + + Math.round(screenAnchor.width / 2) - + Math.round(width / 2); + + // Clamp to work area + if (newX + width > workArea.x + workArea.width) { + newX = workArea.x + workArea.width - width; + } + if (newX < workArea.x) { + newX = workArea.x; + } + + popup.setBounds({ + x: newX, + y: popupY, + width, + height, + }); + }); + + // Show after the page loads to avoid flicker + popup.webContents.on("did-finish-load", () => { + if (!popup.isDestroyed()) { + popup.show(); + popup.focus(); + } + }); + + // Close when the popup loses focus + popup.on("blur", () => { + if (popup.isDestroyed()) return; + // Don't close if devtools is open (for debugging) + if (popup.webContents.isDevToolsOpened()) return; + this.closePopup(); + }); + + popup.on("closed", () => { + if (this.currentPopup === popup) { + this.currentPopup = null; + } + }); + + // Load the extension's popup page. + // Try chrome-extension:// first (enables full chrome.* API access). + // Fall back to loading from the local file path if blocked. 
+ const popupUrl = `chrome-extension://${extensionId}/${popupPath}`; + popup.webContents.loadURL(popupUrl).catch((error) => { + const msg = error instanceof Error ? error.message : String(error); + console.warn( + `[extensions] chrome-extension:// load failed for ${extensionId}, trying file:// fallback:`, + msg, + ); + + // Fallback: load the popup HTML directly from disk + const filePath = path.join(getExtensionsDir(), extensionId, popupPath); + popup.webContents.loadFile(filePath).catch((fileError) => { + console.error( + `[extensions] Failed to load popup for ${extensionId}:`, + fileError, + ); + this.closePopup(); + }); + }); + } + + closePopup(): void { + if (this.currentPopup && !this.currentPopup.isDestroyed()) { + this.currentPopup.destroy(); + } + this.currentPopup = null; + } + + isOpen(): boolean { + return this.currentPopup !== null && !this.currentPopup.isDestroyed(); + } +} + +/** Singleton instance */ +export const extensionPopupManager = new ExtensionPopupManager(); diff --git a/apps/desktop/src/main/lib/file-streaming.ts b/apps/desktop/src/main/lib/file-streaming.ts new file mode 100644 index 00000000000..1f4ddf6f8ae --- /dev/null +++ b/apps/desktop/src/main/lib/file-streaming.ts @@ -0,0 +1,226 @@ +import { createReadStream } from "node:fs"; +import { stat } from "node:fs/promises"; +import type { IncomingMessage, ServerResponse } from "node:http"; +import { extname } from "node:path"; +import { Readable } from "node:stream"; + +export const AUDIO_MIME_TYPES: Record = { + ".mp3": "audio/mpeg", + ".wav": "audio/wav", + ".ogg": "audio/ogg", + ".oga": "audio/ogg", + ".m4a": "audio/mp4", + ".aac": "audio/aac", + ".flac": "audio/flac", + ".opus": "audio/ogg", + ".weba": "audio/webm", +}; + +export const VIDEO_MIME_TYPES: Record = { + ".mp4": "video/mp4", + ".webm": "video/webm", + ".mov": "video/quicktime", + ".m4v": "video/mp4", + ".ogv": "video/ogg", +}; + +export const MEDIA_MIME_TYPES: Record = { + ...AUDIO_MIME_TYPES, + ...VIDEO_MIME_TYPES, +}; + 
+type FileResponseOptions = { + cacheControl?: string; + contentType?: string; +}; + +type ByteRange = { + start: number; + end: number; +}; + +function parseRangeHeader( + rangeHeader: string | null, + fileSize: number, +): ByteRange | "unsatisfiable" | null { + if (!rangeHeader) return null; + + const match = /^bytes=(\d*)-(\d*)$/.exec(rangeHeader.trim()); + if (!match) return null; + + const startStr = match[1]; + const endStr = match[2]; + let start: number; + let end: number; + + if (startStr === "" && endStr !== "") { + const suffix = Number.parseInt(endStr, 10); + if (!Number.isFinite(suffix) || suffix <= 0) { + return "unsatisfiable"; + } + start = Math.max(0, fileSize - suffix); + end = fileSize - 1; + } else { + start = Number.parseInt(startStr, 10); + end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1; + } + + if ( + !Number.isFinite(start) || + !Number.isFinite(end) || + start > end || + start < 0 || + start >= fileSize + ) { + return "unsatisfiable"; + } + + return { + start, + end: Math.min(end, fileSize - 1), + }; +} + +function createWebStream(filePath: string, range?: ByteRange): ReadableStream { + const nodeStream = range + ? createReadStream(filePath, range) + : createReadStream(filePath); + return Readable.toWeb(nodeStream) as unknown as ReadableStream; +} + +function createHeaders( + contentType: string, + fileSize: number, + cacheControl: string, + range?: ByteRange, +): Record { + if (range) { + return { + "Content-Type": contentType, + "Content-Length": String(range.end - range.start + 1), + "Content-Range": `bytes ${range.start}-${range.end}/${fileSize}`, + "Accept-Ranges": "bytes", + "Cache-Control": cacheControl, + }; + } + + return { + "Content-Type": contentType, + "Content-Length": String(fileSize), + "Accept-Ranges": "bytes", + "Cache-Control": cacheControl, + }; +} + +export function getMediaMimeType(filePath: string): string | null { + const key = filePath.startsWith(".") + ? 
filePath.toLowerCase() + : extname(filePath).toLowerCase(); + return MEDIA_MIME_TYPES[key] ?? null; +} + +export function isSupportedMediaFile(filePath: string): boolean { + return getMediaMimeType(filePath) !== null; +} + +export async function createFileProtocolResponse( + request: Request, + filePath: string, + options: FileResponseOptions = {}, +): Promise { + let fileSize: number; + try { + const fileStat = await stat(filePath); + if (!fileStat.isFile()) { + return new Response("Not found", { status: 404 }); + } + fileSize = fileStat.size; + } catch { + return new Response("Not found", { status: 404 }); + } + + const contentType = options.contentType ?? "application/octet-stream"; + const cacheControl = options.cacheControl ?? "no-store"; + const range = parseRangeHeader(request.headers.get("range"), fileSize); + + if (range === "unsatisfiable") { + return new Response("Range not satisfiable", { + status: 416, + headers: { "Content-Range": `bytes */${fileSize}` }, + }); + } + + const headers = createHeaders( + contentType, + fileSize, + cacheControl, + range ?? undefined, + ); + const status = range ? 206 : 200; + const body = + request.method === "HEAD" + ? null + : createWebStream(filePath, range ?? undefined); + + return new Response(body, { status, headers }); +} + +export async function writeFileHttpResponse( + req: IncomingMessage, + res: ServerResponse, + filePath: string, + options: FileResponseOptions = {}, +): Promise { + let fileSize: number; + try { + const fileStat = await stat(filePath); + if (!fileStat.isFile()) { + res.writeHead(404, { "Content-Type": "text/plain" }); + res.end("Not found"); + return; + } + fileSize = fileStat.size; + } catch { + res.writeHead(404, { "Content-Type": "text/plain" }); + res.end("Not found"); + return; + } + + const contentType = options.contentType ?? "application/octet-stream"; + const cacheControl = options.cacheControl ?? "no-store"; + const range = parseRangeHeader(req.headers.range ?? 
null, fileSize); + + if (range === "unsatisfiable") { + res.writeHead(416, { + "Content-Type": "text/plain", + "Content-Range": `bytes */${fileSize}`, + }); + res.end("Range not satisfiable"); + return; + } + + const headers = createHeaders( + contentType, + fileSize, + cacheControl, + range ?? undefined, + ); + const status = range ? 206 : 200; + res.writeHead(status, headers); + + if (req.method === "HEAD") { + res.end(); + return; + } + + const stream = range + ? createReadStream(filePath, range) + : createReadStream(filePath); + stream.on("error", () => { + if (!res.headersSent) { + res.writeHead(500, { "Content-Type": "text/plain" }); + } + res.end("Error reading file"); + }); + stream.pipe(res); +} diff --git a/apps/desktop/src/main/lib/host-service-coordinator.ts b/apps/desktop/src/main/lib/host-service-coordinator.ts index b61fcf3f279..efec5c71b99 100644 --- a/apps/desktop/src/main/lib/host-service-coordinator.ts +++ b/apps/desktop/src/main/lib/host-service-coordinator.ts @@ -7,7 +7,6 @@ import { settings } from "@superset/local-db"; import { getDeviceName, getHashedDeviceId } from "@superset/shared/device-info"; import { app } from "electron"; import { env } from "main/env.main"; -import semver from "semver"; import { env as sharedEnv } from "shared/env.shared"; import { getProcessEnvWithShellPath } from "../../lib/trpc/routers/workspaces/utils/shell-env"; import { SUPERSET_HOME_DIR } from "./app-environment"; @@ -29,15 +28,8 @@ import { import { localDb } from "./local-db"; import { HOOK_PROTOCOL_VERSION } from "./terminal/env"; -/** - * Minimum host-service version this app can work with. Bumping this forces - * the coordinator to kill + respawn any adopted service older than this, - * which is how we prevent the renderer from talking to a stale host-service - * that's missing newly-added procedures/params. - * - * 0.2.0: `workspaceCreation.adopt` gained optional `worktreePath`. 
- */ -const MIN_HOST_SERVICE_VERSION = "0.2.0"; +/** Minimum host-service version this app can work with. */ +const MIN_HOST_SERVICE_VERSION = "0.1.0"; export type HostServiceStatus = "starting" | "running" | "stopped"; @@ -295,15 +287,9 @@ export class HostServiceCoordinator extends EventEmitter { manifest.endpoint, manifest.authToken, ); - if ( - !version || - !semver.satisfies(version, `>=${MIN_HOST_SERVICE_VERSION}`) - ) { - const reason = version - ? `version ${version} < ${MIN_HOST_SERVICE_VERSION}` - : "version unknown"; + if (version && version < MIN_HOST_SERVICE_VERSION) { console.log( - `[host-service:${organizationId}] Adopted service ${reason}, killing`, + `[host-service:${organizationId}] Adopted service version ${version} < ${MIN_HOST_SERVICE_VERSION}, killing`, ); try { process.kill(manifest.pid, "SIGTERM"); diff --git a/apps/desktop/src/main/lib/language-services/diagnostics-store.ts b/apps/desktop/src/main/lib/language-services/diagnostics-store.ts new file mode 100644 index 00000000000..d728ce240ae --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/diagnostics-store.ts @@ -0,0 +1,184 @@ +import { EventEmitter } from "node:events"; +import type { + LanguageServiceDiagnostic, + LanguageServiceWorkspaceSnapshot, +} from "./types"; + +const MAX_PROBLEMS = 500; + +function diagnosticSortValue(severity: string): number { + switch (severity) { + case "error": + return 0; + case "warning": + return 1; + case "info": + return 2; + default: + return 3; + } +} + +type WorkspaceDiagnostics = Map; + +export class LanguageDiagnosticsStore { + private readonly workspaces = new Map(); + + private readonly versions = new Map(); + + private readonly emitter = new EventEmitter(); + + setFileDiagnostics( + workspaceId: string, + fileKey: string, + diagnostics: LanguageServiceDiagnostic[], + ): void { + const workspaceDiagnostics = + this.workspaces.get(workspaceId) ?? 
+ new Map(); + workspaceDiagnostics.set(fileKey, diagnostics); + this.workspaces.set(workspaceId, workspaceDiagnostics); + this.bump(workspaceId); + } + + clearFileDiagnostics(workspaceId: string, fileKey: string): void { + const workspaceDiagnostics = this.workspaces.get(workspaceId); + if (!workspaceDiagnostics) { + return; + } + + if (!workspaceDiagnostics.delete(fileKey)) { + return; + } + + if (workspaceDiagnostics.size === 0) { + this.workspaces.delete(workspaceId); + } + + this.bump(workspaceId); + } + + clearWorkspace(workspaceId: string): void { + if (!this.workspaces.delete(workspaceId)) { + return; + } + + this.bump(workspaceId); + } + + clearProviderDiagnostics(providerId: string, workspaceId?: string): void { + const fileKeyPrefix = `${providerId}::`; + const targetWorkspaceIds = workspaceId + ? [workspaceId] + : Array.from(this.workspaces.keys()); + + for (const targetWorkspaceId of targetWorkspaceIds) { + const workspaceDiagnostics = this.workspaces.get(targetWorkspaceId); + if (!workspaceDiagnostics) { + continue; + } + + let changed = false; + for (const fileKey of Array.from(workspaceDiagnostics.keys())) { + if (!fileKey.startsWith(fileKeyPrefix)) { + continue; + } + + workspaceDiagnostics.delete(fileKey); + changed = true; + } + + if (!changed) { + continue; + } + + if (workspaceDiagnostics.size === 0) { + this.workspaces.delete(targetWorkspaceId); + } + + this.bump(targetWorkspaceId); + } + } + + getVersion(workspaceId: string): number { + return this.versions.get(workspaceId) ?? 
0; + } + + subscribe( + workspaceId: string, + listener: (payload: { version: number }) => void, + ) { + const eventName = this.eventName(workspaceId); + this.emitter.on(eventName, listener); + return () => { + this.emitter.off(eventName, listener); + }; + } + + createSnapshot(args: { + workspaceId: string; + workspacePath: string; + providers: LanguageServiceWorkspaceSnapshot["providers"]; + }): LanguageServiceWorkspaceSnapshot { + const flattened = Array.from( + this.workspaces.get(args.workspaceId)?.values() ?? [], + ) + .flat() + .sort((left, right) => { + const severityDelta = + diagnosticSortValue(left.severity) - + diagnosticSortValue(right.severity); + if (severityDelta !== 0) { + return severityDelta; + } + + const pathDelta = (left.relativePath ?? "").localeCompare( + right.relativePath ?? "", + ); + if (pathDelta !== 0) { + return pathDelta; + } + + const lineDelta = (left.line ?? 0) - (right.line ?? 0); + if (lineDelta !== 0) { + return lineDelta; + } + + return (left.column ?? 0) - (right.column ?? 0); + }); + + const problems = flattened.slice(0, MAX_PROBLEMS); + return { + status: "ready", + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + providers: args.providers, + problems, + totalCount: flattened.length, + truncated: flattened.length > problems.length, + summary: { + errorCount: flattened.filter((problem) => problem.severity === "error") + .length, + warningCount: flattened.filter( + (problem) => problem.severity === "warning", + ).length, + infoCount: flattened.filter((problem) => problem.severity === "info") + .length, + hintCount: flattened.filter((problem) => problem.severity === "hint") + .length, + }, + }; + } + + private bump(workspaceId: string): void { + const version = (this.versions.get(workspaceId) ?? 
0) + 1; + this.versions.set(workspaceId, version); + this.emitter.emit(this.eventName(workspaceId), { version }); + } + + private eventName(workspaceId: string): string { + return `workspace:${workspaceId}`; + } +} + +export const languageDiagnosticsStore = new LanguageDiagnosticsStore(); diff --git a/apps/desktop/src/main/lib/language-services/lsp/ExternalLspLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/lsp/ExternalLspLanguageProvider.ts new file mode 100644 index 00000000000..8910329b855 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/lsp/ExternalLspLanguageProvider.ts @@ -0,0 +1,1071 @@ +import { languageDiagnosticsStore } from "../diagnostics-store"; +import type { + LanguageServiceCallHierarchyItem, + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceHover, + LanguageServiceIncomingCall, + LanguageServiceLocation, + LanguageServiceMarkupContent, + LanguageServiceProvider, + LanguageServiceProviderSummary, + LanguageServiceRange, + LanguageServiceRelatedInformation, +} from "../types"; +import { + absolutePathToFileUri, + fileUriToAbsolutePath, + lspSeverityToLanguageServiceSeverity, + offsetToLspPosition, + toRelativeWorkspacePath, +} from "../utils"; +import type { ResolvedLspCommand } from "./command-resolvers"; +import { StdioJsonRpcClient } from "./StdioJsonRpcClient"; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; + uri: string; +}; + +type LspDiagnostic = { + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + severity?: number; + code?: string | number | { value?: string | number }; + source?: string; + message: string; + relatedInformation?: Array<{ + location: { + uri: string; + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + }; + message: string; + }>; +}; + +type LspPosition = { line: number; character: number }; +type LspRange = { start: 
LspPosition; end: LspPosition }; +type LspLocation = { uri: string; range: LspRange }; +type LspLocationLink = { + targetUri: string; + targetRange: LspRange; + targetSelectionRange?: LspRange; +}; +type LspMarkupContent = { + kind?: string; + value?: string; +}; +type LspMarkedString = string | { language?: string; value?: string }; +type LspHover = { + contents?: LspMarkupContent | LspMarkedString | LspMarkedString[]; + range?: LspRange; +}; + +type WorkspaceSession = { + workspaceId: string; + workspacePath: string; + client: StdioJsonRpcClient; + openDocuments: Map; + lastError: string | null; + textDocumentSyncMode: "full" | "incremental"; +}; + +type ProviderArgs = { + workspaceId: string; + workspacePath: string; +}; + +type RefreshRequest = { + method: string; + params?: unknown | ((args: ProviderArgs) => unknown); +}; + +type ExternalLspProviderOptions = { + id: string; + label: string; + description: string; + languageIds: string[]; + resolveServerCommand: + | ((args: ProviderArgs) => Promise) + | ((args: ProviderArgs) => ResolvedLspCommand | null); + mapDocumentLanguageId?: (languageId: string) => string; + initializationOptions?: unknown | ((args: ProviderArgs) => unknown); + configuration?: unknown | ((args: ProviderArgs) => unknown); + refreshRequest?: RefreshRequest | null; + clientCapabilities?: unknown; + defaultSource?: string; +}; + +function resolveTextDocumentSyncMode(result: unknown): "full" | "incremental" { + const textDocumentSync = ( + result as { + capabilities?: { + textDocumentSync?: + | number + | { + change?: number; + }; + }; + } + )?.capabilities?.textDocumentSync; + + if (typeof textDocumentSync === "number") { + return textDocumentSync === 2 ? 
"incremental" : "full"; + } + + if ( + textDocumentSync && + typeof textDocumentSync === "object" && + textDocumentSync.change === 2 + ) { + return "incremental"; + } + + return "full"; +} + +function getSectionValue( + configuration: unknown, + section?: string | null, +): unknown { + if (!section) { + return configuration ?? null; + } + + const keys = section.split("."); + let current: unknown = configuration; + for (const key of keys) { + if (!current || typeof current !== "object") { + return null; + } + + current = (current as Record)[key]; + if (current === undefined) { + return null; + } + } + + return current; +} + +function lspRangeToLanguageServiceRange( + range: LspRange | undefined, +): LanguageServiceRange | null { + if (!range) { + return null; + } + + return { + line: range.start.line + 1, + column: range.start.character + 1, + endLine: range.end.line + 1, + endColumn: range.end.character + 1, + }; +} + +function lspLocationToLanguageServiceLocation( + location: LspLocation | LspLocationLink, +): LanguageServiceLocation | null { + const targetUri = "targetUri" in location ? location.targetUri : location.uri; + const targetRange = + "targetUri" in location + ? (location.targetSelectionRange ?? location.targetRange) + : location.range; + const absolutePath = fileUriToAbsolutePath(targetUri); + if (!absolutePath) { + return null; + } + + return { + absolutePath, + line: targetRange.start.line + 1, + column: targetRange.start.character + 1, + endLine: targetRange.end.line + 1, + endColumn: targetRange.end.character + 1, + }; +} + +function normalizeMarkedString( + value: LspMarkedString, +): LanguageServiceMarkupContent | null { + if (typeof value === "string") { + return value + ? 
{ + kind: "plaintext", + value, + } + : null; + } + + if (value.language && value.value) { + return { + kind: "markdown", + value: `\`\`\`${value.language}\n${value.value}\n\`\`\``, + }; + } + + if (value.value) { + return { + kind: "plaintext", + value: value.value, + }; + } + + return null; +} + +function normalizeLspHoverContents( + contents: LspHover["contents"], +): LanguageServiceMarkupContent[] { + if (!contents) { + return []; + } + + if (Array.isArray(contents)) { + return contents + .map((item) => normalizeMarkedString(item)) + .filter((item): item is LanguageServiceMarkupContent => item !== null); + } + + if (typeof contents === "string") { + const normalized = normalizeMarkedString(contents); + return normalized ? [normalized] : []; + } + + if ("language" in contents) { + const normalized = normalizeMarkedString(contents); + return normalized ? [normalized] : []; + } + + const markup = contents as LspMarkupContent; + if (markup.value) { + return [ + { + kind: markup.kind === "markdown" ? 
"markdown" : "plaintext", + value: markup.value, + }, + ]; + } + + return []; +} + +export class ExternalLspLanguageProvider implements LanguageServiceProvider { + readonly id: string; + + readonly label: string; + + readonly description: string; + + readonly languageIds: string[]; + + private readonly sessions = new Map(); + + private readonly pendingSessions = new Map< + string, + Promise + >(); + + private readonly workspaceErrors = new Map(); + + constructor(private readonly options: ExternalLspProviderOptions) { + this.id = options.id; + this.label = options.label; + this.description = options.description; + this.languageIds = options.languageIds; + } + + supportsLanguage(languageId: string): boolean { + return this.languageIds.includes(languageId); + } + + async openDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + const uri = absolutePathToFileUri(document.absolutePath); + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + uri, + }); + await session.client.notify("textDocument/didOpen", { + textDocument: { + uri, + languageId: this.mapDocumentLanguageId(document.languageId), + version: document.version, + text: document.content, + }, + }); + } + + async changeDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + const previous = session.openDocuments.get(document.absolutePath); + if (!previous) { + await this.openDocument(document); + return; + } + + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + uri: previous.uri, + }); + + await this.sendDidChange( + session, + previous, + document.version, + document.content, + ); + } + + async closeDocument(args: { + 
workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + const existing = session.openDocuments.get(args.absolutePath); + session.openDocuments.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + if (existing) { + await session.client.notify("textDocument/didClose", { + textDocument: { + uri: existing.uri, + }, + }); + } + + if (session.openDocuments.size === 0) { + await this.disposeWorkspace(args); + } + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + try { + const configuration = this.resolveConfiguration(args); + if (configuration !== null) { + await session.client.notify("workspace/didChangeConfiguration", { + settings: configuration, + }); + } + + if (this.options.refreshRequest) { + const refreshParams = + typeof this.options.refreshRequest.params === "function" + ? this.options.refreshRequest.params(args) + : this.options.refreshRequest.params; + await session.client.request( + this.options.refreshRequest.method, + refreshParams, + ); + } else { + for (const entry of session.openDocuments.values()) { + await this.sendDidChange( + session, + entry, + entry.version, + entry.content, + ); + } + } + session.lastError = null; + this.workspaceErrors.delete(args.workspaceId); + } catch (error) { + session.lastError = + error instanceof Error ? error.message : String(error); + this.workspaceErrors.set(args.workspaceId, session.lastError); + } + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const session = this.sessions.get(args.workspaceId); + const lastError = + session?.lastError ?? 
this.workspaceErrors.get(args.workspaceId) ?? null; + + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!session) { + return { + providerId: this.id, + label: this.label, + status: lastError ? "error" : "idle", + details: lastError, + documentCount: 0, + }; + } + + return { + providerId: this.id, + label: this.label, + status: lastError ? "error" : "ready", + details: lastError, + documentCount: session.openDocuments.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (session) { + await session.client.stop(); + this.sessions.delete(args.workspaceId); + } + + this.workspaceErrors.delete(args.workspaceId); + } + + async findReferences(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const result = (await session.client.request("textDocument/references", { + textDocument: { + uri: absolutePathToFileUri(args.absolutePath), + }, + position: { + line: args.line - 1, + character: args.column - 1, + }, + context: { includeDeclaration: true }, + })) as Array<{ + uri: string; + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + }> | null; + + if (!result) return null; + + return result + .map((loc) => { + const absPath = fileUriToAbsolutePath(loc.uri); + if (!absPath) return null; + return { + absolutePath: absPath, + line: loc.range.start.line + 1, + column: loc.range.start.character + 1, + endLine: loc.range.end.line + 1, + endColumn: loc.range.end.character + 1, + }; + }) + .filter((loc): loc is LanguageServiceLocation => loc !== null); + } catch (error) { + const message = error instanceof Error ? 
error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(args.workspaceId, message); + return null; + } + } + + async getHover(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const result = (await session.client.request("textDocument/hover", { + textDocument: { + uri: absolutePathToFileUri(args.absolutePath), + }, + position: { + line: args.line - 1, + character: args.column - 1, + }, + })) as LspHover | null; + + const contents = normalizeLspHoverContents(result?.contents); + if (contents.length === 0) { + return null; + } + + session.lastError = null; + this.workspaceErrors.delete(args.workspaceId); + return { + contents, + range: lspRangeToLanguageServiceRange(result?.range), + }; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(args.workspaceId, message); + return null; + } + } + + async getDefinition(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const result = (await session.client.request("textDocument/definition", { + textDocument: { + uri: absolutePathToFileUri(args.absolutePath), + }, + position: { + line: args.line - 1, + character: args.column - 1, + }, + })) as + | LspLocation + | LspLocationLink + | Array + | null; + + const locations = ( + Array.isArray(result) ? result : result ? 
[result] : [] + ) + .map((location) => lspLocationToLanguageServiceLocation(location)) + .filter( + (location): location is LanguageServiceLocation => location !== null, + ); + + if (locations.length === 0) { + return null; + } + + session.lastError = null; + this.workspaceErrors.delete(args.workspaceId); + return locations; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(args.workspaceId, message); + return null; + } + } + + async prepareCallHierarchy(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const result = (await session.client.request( + "textDocument/prepareCallHierarchy", + { + textDocument: { + uri: absolutePathToFileUri(args.absolutePath), + }, + position: { + line: args.line - 1, + character: args.column - 1, + }, + }, + )) as Array<{ + name: string; + kind: number; + uri: string; + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + selectionRange: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + }> | null; + + if (!result) return null; + + return result + .map((item) => { + const absPath = fileUriToAbsolutePath(item.uri); + if (!absPath) return null; + return { + name: item.name, + kind: String(item.kind), + absolutePath: absPath, + line: item.range.start.line + 1, + column: item.range.start.character + 1, + endLine: item.range.end.line + 1, + endColumn: item.range.end.character + 1, + selectionLine: item.selectionRange.start.line + 1, + selectionColumn: item.selectionRange.start.character + 1, + selectionEndLine: item.selectionRange.end.line + 1, + selectionEndColumn: item.selectionRange.end.character + 1, + }; + }) + .filter( + (item): item is 
LanguageServiceCallHierarchyItem => item !== null, + ); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(args.workspaceId, message); + return null; + } + } + + async getIncomingCalls(args: { + workspaceId: string; + item: LanguageServiceCallHierarchyItem; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const lspItem = { + name: args.item.name, + kind: Number(args.item.kind), + uri: absolutePathToFileUri(args.item.absolutePath), + range: { + start: { + line: args.item.line - 1, + character: args.item.column - 1, + }, + end: { + line: args.item.endLine - 1, + character: args.item.endColumn - 1, + }, + }, + selectionRange: { + start: { + line: args.item.selectionLine - 1, + character: args.item.selectionColumn - 1, + }, + end: { + line: args.item.selectionEndLine - 1, + character: args.item.selectionEndColumn - 1, + }, + }, + }; + + const result = (await session.client.request( + "callHierarchy/incomingCalls", + { item: lspItem }, + )) as Array<{ + from: { + name: string; + kind: number; + uri: string; + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + selectionRange: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + }; + fromRanges: Array<{ + start: { line: number; character: number }; + end: { line: number; character: number }; + }>; + }> | null; + + if (!result) return null; + + return result + .map((call) => { + const fromPath = fileUriToAbsolutePath(call.from.uri); + if (!fromPath) return null; + return { + from: { + name: call.from.name, + kind: String(call.from.kind), + absolutePath: fromPath, + line: call.from.range.start.line + 1, + column: call.from.range.start.character + 1, + endLine: call.from.range.end.line + 1, + endColumn: call.from.range.end.character + 1, + selectionLine: 
call.from.selectionRange.start.line + 1, + selectionColumn: call.from.selectionRange.start.character + 1, + selectionEndLine: call.from.selectionRange.end.line + 1, + selectionEndColumn: call.from.selectionRange.end.character + 1, + }, + fromRanges: call.fromRanges.map((r) => ({ + line: r.start.line + 1, + column: r.start.character + 1, + endLine: r.end.line + 1, + endColumn: r.end.character + 1, + })), + }; + }) + .filter((call): call is LanguageServiceIncomingCall => call !== null); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(args.workspaceId, message); + return null; + } + } + + private async ensureSession( + workspaceId: string, + workspacePath: string, + ): Promise { + const existing = this.sessions.get(workspaceId); + if (existing) { + return existing; + } + + const pending = this.pendingSessions.get(workspaceId); + if (pending) { + return pending; + } + + const promise = this.initSession(workspaceId, workspacePath); + this.pendingSessions.set(workspaceId, promise); + try { + return await promise; + } finally { + this.pendingSessions.delete(workspaceId); + } + } + + private async initSession( + workspaceId: string, + workspacePath: string, + ): Promise { + const resolvedCommand = await this.options.resolveServerCommand({ + workspaceId, + workspacePath, + }); + if (!resolvedCommand) { + const message = `${this.label} language server is not available in this environment.`; + this.workspaceErrors.set(workspaceId, message); + throw new Error(message); + } + + let session!: WorkspaceSession; + const client = new StdioJsonRpcClient({ + name: `${this.id}:${workspaceId}`, + command: resolvedCommand.command, + args: resolvedCommand.args, + cwd: resolvedCommand.cwd ?? workspacePath, + env: resolvedCommand.env ?? 
process.env, + shell: resolvedCommand.shell, + onNotification: (message) => { + this.handleNotification(session, message); + }, + onRequest: async (message) => + await this.handleServerRequest(session, message), + onExit: ({ code, signal }) => { + const error = `${this.label} language server exited (${code ?? "null"}${signal ? `, ${signal}` : ""})`; + session.lastError = error; + this.workspaceErrors.set(workspaceId, error); + this.sessions.delete(workspaceId); + }, + onStderr: (chunk) => { + console.error(`[language-services/${this.id}] stderr`, { + workspaceId, + chunk, + }); + }, + }); + + session = { + workspaceId, + workspacePath, + client, + openDocuments: new Map(), + lastError: null, + textDocumentSyncMode: "full", + }; + + try { + await client.start(); + const workspaceUri = absolutePathToFileUri(workspacePath); + const initializeResult = await client.request("initialize", { + processId: process.pid, + clientInfo: { + name: "Superset Desktop", + version: "1.4.6", + }, + rootUri: workspaceUri, + rootPath: workspacePath, + workspaceFolders: [ + { + uri: workspaceUri, + name: this.workspaceFolderName(workspacePath), + }, + ], + capabilities: this.options.clientCapabilities ?? 
{ + workspace: { + configuration: true, + workspaceFolders: true, + }, + textDocument: { + publishDiagnostics: { + relatedInformation: true, + }, + hover: { + contentFormat: ["markdown", "plaintext"], + }, + definition: { + linkSupport: true, + }, + references: { + dynamicRegistration: false, + }, + callHierarchy: { + dynamicRegistration: false, + }, + documentSymbol: { + dynamicRegistration: false, + }, + }, + }, + initializationOptions: this.resolveInitializationOptions({ + workspaceId, + workspacePath, + }), + }); + await client.notify("initialized", {}); + session.textDocumentSyncMode = + resolveTextDocumentSyncMode(initializeResult); + session.lastError = null; + this.workspaceErrors.delete(workspaceId); + this.sessions.set(workspaceId, session); + return session; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(workspaceId, message); + await client.stop(); + throw error; + } + } + + private async sendDidChange( + session: WorkspaceSession, + previous: OpenDocumentEntry, + version: number, + content: string, + ): Promise { + await session.client.notify("textDocument/didChange", { + textDocument: { + uri: previous.uri, + version, + }, + contentChanges: + session.textDocumentSyncMode === "incremental" + ? 
[ + { + range: { + start: { line: 0, character: 0 }, + end: offsetToLspPosition( + previous.content, + previous.content.length, + ), + }, + text: content, + }, + ] + : [ + { + text: content, + }, + ], + }); + } + + private handleNotification( + session: WorkspaceSession, + message: { + method: string; + params?: unknown; + }, + ): void { + if (message.method !== "textDocument/publishDiagnostics") { + return; + } + + const params = message.params as + | { + uri?: string; + diagnostics?: LspDiagnostic[]; + } + | undefined; + if (!params?.uri) { + return; + } + + const absolutePath = fileUriToAbsolutePath(params.uri); + if (!absolutePath) { + return; + } + + languageDiagnosticsStore.setFileDiagnostics( + session.workspaceId, + this.fileKey(absolutePath), + (params.diagnostics ?? []).map((diagnostic) => + this.mapDiagnostic(session.workspacePath, absolutePath, diagnostic), + ), + ); + } + + private async handleServerRequest( + session: WorkspaceSession, + message: { + method: string; + params?: unknown; + }, + ): Promise { + switch (message.method) { + case "workspace/configuration": { + const items = (( + message.params as { + items?: Array<{ section?: string | null }> | null; + } + )?.items ?? 
[]) as Array<{ section?: string | null }>; + const configuration = this.resolveConfiguration({ + workspaceId: session.workspaceId, + workspacePath: session.workspacePath, + }); + return items.map((item) => + getSectionValue(configuration, item.section), + ); + } + case "workspace/workspaceFolders": + return [ + { + uri: absolutePathToFileUri(session.workspacePath), + name: this.workspaceFolderName(session.workspacePath), + }, + ]; + case "client/registerCapability": + case "client/unregisterCapability": + case "window/workDoneProgress/create": + return null; + default: + return undefined; + } + } + + private mapDiagnostic( + workspacePath: string, + absolutePath: string, + diagnostic: LspDiagnostic, + ): LanguageServiceDiagnostic { + const relatedInformation = ( + diagnostic.relatedInformation ?? [] + ).map((item) => { + const relatedAbsolutePath = fileUriToAbsolutePath(item.location.uri); + return { + absolutePath: relatedAbsolutePath, + relativePath: relatedAbsolutePath + ? toRelativeWorkspacePath(workspacePath, relatedAbsolutePath) + : null, + line: item.location.range.start.line + 1, + column: item.location.range.start.character + 1, + endLine: item.location.range.end.line + 1, + endColumn: item.location.range.end.character + 1, + message: item.message, + }; + }); + + return { + providerId: this.id, + source: diagnostic.source ?? this.options.defaultSource ?? this.id, + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: diagnostic.range.start.line + 1, + column: diagnostic.range.start.character + 1, + endLine: diagnostic.range.end.line + 1, + endColumn: diagnostic.range.end.character + 1, + message: diagnostic.message, + code: + typeof diagnostic.code === "object" + ? (diagnostic.code?.value ?? null) + : (diagnostic.code ?? 
null), + severity: lspSeverityToLanguageServiceSeverity(diagnostic.severity), + relatedInformation, + }; + } + + private resolveInitializationOptions(args: ProviderArgs): unknown { + return typeof this.options.initializationOptions === "function" + ? this.options.initializationOptions(args) + : this.options.initializationOptions; + } + + private resolveConfiguration(args: ProviderArgs): unknown { + return typeof this.options.configuration === "function" + ? this.options.configuration(args) + : (this.options.configuration ?? null); + } + + private mapDocumentLanguageId(languageId: string): string { + return this.options.mapDocumentLanguageId?.(languageId) ?? languageId; + } + + private workspaceFolderName(workspacePath: string): string { + return workspacePath.split(/[\\/]/).at(-1) || workspacePath; + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/lsp/StdioJsonRpcClient.ts b/apps/desktop/src/main/lib/language-services/lsp/StdioJsonRpcClient.ts new file mode 100644 index 00000000000..0963fc532a9 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/lsp/StdioJsonRpcClient.ts @@ -0,0 +1,356 @@ +import { type ChildProcessWithoutNullStreams, spawn } from "node:child_process"; + +type JsonRpcId = number | string | null; + +type JsonRpcRequestMessage = { + jsonrpc: "2.0"; + id: JsonRpcId; + method: string; + params?: unknown; +}; + +type JsonRpcNotificationMessage = { + jsonrpc: "2.0"; + method: string; + params?: unknown; +}; + +type JsonRpcResponseMessage = { + jsonrpc: "2.0"; + id: JsonRpcId; + result?: unknown; + error?: { + code: number; + message: string; + data?: unknown; + }; +}; + +type JsonRpcMessage = + | JsonRpcRequestMessage + | JsonRpcNotificationMessage + | JsonRpcResponseMessage; + +type PendingRequest = { + resolve: (value: unknown) => void; + reject: (error: Error) => void; +}; + +type StdioJsonRpcClientOptions = { + name: string; + 
command: string; + args?: string[]; + cwd?: string; + env?: NodeJS.ProcessEnv; + shell?: boolean; + onNotification?: (message: JsonRpcNotificationMessage) => void; + onRequest?: (message: JsonRpcRequestMessage) => Promise | unknown; + onExit?: (payload: { + code: number | null; + signal: NodeJS.Signals | null; + }) => void; + onStderr?: (chunk: string) => void; +}; + +function isJsonRpcResponseMessage( + message: JsonRpcMessage, +): message is JsonRpcResponseMessage { + return "id" in message && !("method" in message); +} + +function isJsonRpcRequestMessage( + message: JsonRpcMessage, +): message is JsonRpcRequestMessage { + return "id" in message && "method" in message; +} + +type ConsumeResult = + | { kind: "message"; body: string; rest: Buffer } + | { kind: "skip"; rest: Buffer } + | null; + +function consumeMessage(buffer: Buffer): ConsumeResult { + const separatorIndex = buffer.indexOf("\r\n\r\n"); + if (separatorIndex === -1) { + return null; + } + + const header = buffer.subarray(0, separatorIndex).toString("utf8"); + const contentLengthMatch = /Content-Length:\s*(\d+)/i.exec(header); + if (!contentLengthMatch) { + // Invalid header — skip past the separator so the buffer can recover + return { kind: "skip", rest: buffer.subarray(separatorIndex + 4) }; + } + + const contentLength = Number(contentLengthMatch[1]); + const bodyStart = separatorIndex + 4; + const bodyEnd = bodyStart + contentLength; + if (buffer.length < bodyEnd) { + return null; + } + + return { + kind: "message", + body: buffer.subarray(bodyStart, bodyEnd).toString("utf8"), + rest: buffer.subarray(bodyEnd), + }; +} + +export class StdioJsonRpcClient { + private process: ChildProcessWithoutNullStreams | null = null; + + private nextId = 0; + + private buffer: Buffer = Buffer.alloc(0); + + private readonly pendingRequests = new Map(); + + private stopping = false; + + constructor(private readonly options: StdioJsonRpcClientOptions) {} + + async start(): Promise { + if (this.process) { + return; 
+ } + + const child = spawn(this.options.command, this.options.args ?? [], { + cwd: this.options.cwd, + env: this.options.env, + shell: this.options.shell, + stdio: ["pipe", "pipe", "pipe"], + }); + + this.process = child; + child.stdout.on("data", (chunk: Buffer) => { + this.handleStdout(chunk); + }); + child.stderr.setEncoding("utf8"); + child.stderr.on("data", (chunk: string) => { + this.options.onStderr?.(chunk); + }); + child.on("exit", (code, signal) => { + this.process = null; + for (const pendingRequest of this.pendingRequests.values()) { + pendingRequest.reject( + new Error( + `${this.options.name} exited (${code ?? "null"}${signal ? `, ${signal}` : ""})`, + ), + ); + } + this.pendingRequests.clear(); + this.options.onExit?.({ code, signal }); + }); + child.on("error", (error) => { + this.process = null; + for (const pendingRequest of this.pendingRequests.values()) { + pendingRequest.reject(error); + } + this.pendingRequests.clear(); + }); + } + + async request( + method: string, + params?: unknown, + timeoutMs = 30_000, + ): Promise { + const id = ++this.nextId; + return await new Promise((resolve, reject) => { + const timer = setTimeout(() => { + this.pendingRequests.delete(id); + reject( + new Error( + `${this.options.name} request "${method}" timed out after ${timeoutMs}ms`, + ), + ); + }, timeoutMs); + + this.pendingRequests.set(id, { + resolve: (value) => { + clearTimeout(timer); + resolve(value); + }, + reject: (error) => { + clearTimeout(timer); + reject(error); + }, + }); + + void this.writeMessage({ + jsonrpc: "2.0", + id, + method, + params, + }).catch((error) => { + clearTimeout(timer); + this.pendingRequests.delete(id); + reject(error); + }); + }); + } + + async notify(method: string, params?: unknown): Promise { + await this.writeMessage({ + jsonrpc: "2.0", + method, + params, + }); + } + + async stop(): Promise { + if (!this.process || this.stopping) { + return; + } + + this.stopping = true; + const child = this.process; + + // Attempt 
graceful LSP shutdown → exit before killing + try { + await this.request("shutdown", null, 5_000); + await this.notify("exit"); + } catch { + // Graceful path failed — fall through to kill + } + + this.process = null; + this.stopping = false; + child.removeAllListeners(); + if (!child.killed) { + child.kill(); + } + + for (const pendingRequest of this.pendingRequests.values()) { + pendingRequest.reject(new Error(`${this.options.name} stopped`)); + } + this.pendingRequests.clear(); + } + + private handleStdout(chunk: Buffer): void { + this.buffer = Buffer.concat([this.buffer, chunk]); + while (true) { + const result = consumeMessage(this.buffer); + if (!result) { + return; + } + + this.buffer = result.rest; + + if (result.kind === "skip") { + console.warn("[language-services/lsp] Skipped invalid header block", { + name: this.options.name, + }); + continue; + } + + if (!result.body.trim()) { + continue; + } + + try { + const parsed = JSON.parse(result.body) as JsonRpcMessage; + this.handleMessage(parsed); + } catch (error) { + console.error( + "[language-services/lsp] Failed to parse JSON-RPC payload", + { + name: this.options.name, + error, + body: result.body, + }, + ); + } + } + } + + private handleMessage(message: JsonRpcMessage): void { + if (isJsonRpcResponseMessage(message)) { + const requestId = Number(message.id); + const pendingRequest = Number.isNaN(requestId) + ? null + : this.pendingRequests.get(requestId); + if (!pendingRequest) { + return; + } + + this.pendingRequests.delete(requestId); + if (message.error) { + pendingRequest.reject(new Error(message.error.message)); + return; + } + + pendingRequest.resolve(message.result); + return; + } + + if (isJsonRpcRequestMessage(message)) { + void this.handleServerRequest(message); + return; + } + + this.options.onNotification?.(message); + } + + private async handleServerRequest( + message: JsonRpcRequestMessage, + ): Promise { + try { + const result = + (await this.options.onRequest?.(message)) ?? 
+ this.defaultRequestResult(message.method); + await this.writeMessage({ + jsonrpc: "2.0", + id: message.id, + result: result ?? null, + }); + } catch (error) { + await this.writeMessage({ + jsonrpc: "2.0", + id: message.id, + error: { + code: -32603, + message: error instanceof Error ? error.message : String(error), + }, + }); + } + } + + private defaultRequestResult(method: string): unknown { + switch (method) { + case "client/registerCapability": + case "client/unregisterCapability": + case "window/workDoneProgress/create": + return null; + case "workspace/configuration": + return []; + default: + throw new Error(`Unhandled JSON-RPC request: ${method}`); + } + } + + private async writeMessage(message: JsonRpcMessage): Promise { + const child = this.process; + if (!child) { + throw new Error(`${this.options.name} is not running`); + } + + const payload = Buffer.from(JSON.stringify(message), "utf8"); + const header = Buffer.from( + `Content-Length: ${payload.byteLength}\r\n\r\n`, + "utf8", + ); + const combined = Buffer.concat([header, payload]); + + await new Promise((resolve, reject) => { + child.stdin.write(combined, (error) => { + if (error) { + reject(error); + return; + } + + resolve(); + }); + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/lsp/command-resolvers.ts b/apps/desktop/src/main/lib/language-services/lsp/command-resolvers.ts new file mode 100644 index 00000000000..be6cae4027f --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/lsp/command-resolvers.ts @@ -0,0 +1,104 @@ +import { spawnSync } from "node:child_process"; +import fs from "node:fs/promises"; +import { createRequire } from "node:module"; +import path from "node:path"; + +const require = createRequire(import.meta.url); + +export type ResolvedLspCommand = { + command: string; + args?: string[]; + cwd?: string; + env?: NodeJS.ProcessEnv; + shell?: boolean; +}; + +type NodePackageCommandOptions = { + packageName: string; + binName?: string; + args?: string[]; 
+ cwd?: string; + env?: NodeJS.ProcessEnv; +}; + +type ExecutableCandidate = { + command: string; + args?: string[]; + probeArgs?: string[]; + cwd?: string; + env?: NodeJS.ProcessEnv; + shell?: boolean; +}; + +export async function resolveNodePackageBinCommand( + options: NodePackageCommandOptions, +): Promise { + const packageJsonPath = require.resolve( + `${options.packageName}/package.json`, + ); + const packageRoot = path.dirname(packageJsonPath); + const packageJson = JSON.parse( + await fs.readFile(packageJsonPath, "utf8"), + ) as { + bin?: string | Record; + }; + + const binEntry = + typeof packageJson.bin === "string" + ? packageJson.bin + : options.binName + ? packageJson.bin?.[options.binName] + : Object.values(packageJson.bin ?? {})[0]; + + if (!binEntry) { + throw new Error( + `Package ${options.packageName} does not expose a runnable binary`, + ); + } + + return { + command: process.execPath, + args: [path.join(packageRoot, binEntry), ...(options.args ?? [])], + cwd: options.cwd, + env: { + ...process.env, + ...options.env, + ELECTRON_RUN_AS_NODE: "1", + }, + shell: false, + }; +} + +export function resolveAvailableExecutable( + candidates: ExecutableCandidate[], +): ResolvedLspCommand | null { + for (const candidate of candidates) { + const probeResult = spawnSync( + candidate.command, + candidate.probeArgs ?? 
["--version"], + { + cwd: candidate.cwd, + env: { + ...process.env, + ...candidate.env, + }, + shell: candidate.shell, + stdio: "ignore", + timeout: 10_000, + }, + ); + if (probeResult.status !== 0) { + continue; + } + + return { + command: candidate.command, + args: candidate.args, + cwd: candidate.cwd, + env: candidate.env, + shell: candidate.shell, + }; + } + + return null; +} diff --git a/apps/desktop/src/main/lib/language-services/manager.ts b/apps/desktop/src/main/lib/language-services/manager.ts new file mode 100644 index 00000000000..d631e7d78c6 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/manager.ts @@ -0,0 +1,270 @@ +import { languageDiagnosticsStore } from "./diagnostics-store"; +import { CssLanguageProvider } from "./providers/css/CssLanguageProvider"; +import { DartLanguageProvider } from "./providers/dart/DartLanguageProvider"; +import { DockerfileLanguageProvider } from "./providers/dockerfile/DockerfileLanguageProvider"; +import { GoLanguageProvider } from "./providers/go/GoLanguageProvider"; +import { GraphqlLanguageProvider } from "./providers/graphql/GraphqlLanguageProvider"; +import { HtmlLanguageProvider } from "./providers/html/HtmlLanguageProvider"; +import { JsonLanguageProvider } from "./providers/json/JsonLanguageProvider"; +import { PythonLanguageProvider } from "./providers/python/PythonLanguageProvider"; +import { RustLanguageProvider } from "./providers/rust/RustLanguageProvider"; +import { TomlLanguageProvider } from "./providers/toml/TomlLanguageProvider"; +import { TypeScriptLanguageProvider } from "./providers/typescript/TypeScriptLanguageProvider"; +import { YamlLanguageProvider } from "./providers/yaml/YamlLanguageProvider"; +import type { + LanguageServiceCallHierarchyItem, + LanguageServiceDocument, + LanguageServiceHover, + LanguageServiceIncomingCall, + LanguageServiceLocation, + LanguageServiceProvider, + LanguageServiceProviderDescriptor, + LanguageServiceWorkspaceSnapshot, +} from "./types"; + +export 
class LanguageServiceManager { + private readonly providers: LanguageServiceProvider[] = [ + new TypeScriptLanguageProvider(), + new JsonLanguageProvider(), + new YamlLanguageProvider(), + new HtmlLanguageProvider(), + new CssLanguageProvider(), + new TomlLanguageProvider(), + new DartLanguageProvider(), + new PythonLanguageProvider(), + new GoLanguageProvider(), + new RustLanguageProvider(), + new DockerfileLanguageProvider(), + new GraphqlLanguageProvider(), + ]; + + private readonly enabledProviders = new Map( + this.providers.map((provider) => [provider.id, true] as const), + ); + + private readonly knownWorkspaces = new Map(); + + async syncDocument(document: LanguageServiceDocument): Promise { + this.rememberWorkspace(document.workspaceId, document.workspacePath); + const provider = this.resolveProvider(document.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) { + return; + } + + await provider.changeDocument(document); + } + + async openDocument(document: LanguageServiceDocument): Promise { + this.rememberWorkspace(document.workspaceId, document.workspacePath); + const provider = this.resolveProvider(document.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) { + return; + } + + await provider.openDocument(document); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider) { + return; + } + + await provider.closeDocument(args); + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + this.rememberWorkspace(args.workspaceId, args.workspacePath); + await Promise.all( + this.providers + .filter((provider) => this.isProviderEnabled(provider.id)) + .map((provider) => provider.refreshWorkspace(args)), + ); + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + 
this.knownWorkspaces.delete(args.workspaceId); + await Promise.all( + this.providers.map((provider) => provider.disposeWorkspace(args)), + ); + languageDiagnosticsStore.clearWorkspace(args.workspaceId); + } + + getWorkspaceSnapshot(args: { + workspaceId: string; + workspacePath: string; + }): LanguageServiceWorkspaceSnapshot { + this.rememberWorkspace(args.workspaceId, args.workspacePath); + return languageDiagnosticsStore.createSnapshot({ + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + providers: this.providers.map((provider) => + provider.getWorkspaceSummary({ + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + enabled: this.isProviderEnabled(provider.id), + }), + ), + }); + } + + getProviders(): LanguageServiceProviderDescriptor[] { + return this.providers.map((provider) => ({ + providerId: provider.id, + label: provider.label, + description: provider.description, + languageIds: provider.languageIds, + enabled: this.isProviderEnabled(provider.id), + })); + } + + async setProviderEnabled( + providerId: string, + enabled: boolean, + ): Promise { + const provider = this.providers.find( + (candidate) => candidate.id === providerId, + ); + if (!provider) { + return null; + } + + const previous = this.isProviderEnabled(providerId); + if (previous === enabled) { + return { + providerId: provider.id, + label: provider.label, + description: provider.description, + languageIds: provider.languageIds, + enabled, + }; + } + + this.enabledProviders.set(providerId, enabled); + + if (!enabled) { + await Promise.all( + Array.from(this.knownWorkspaces.entries()).map( + async ([workspaceId, workspacePath]) => { + await provider.disposeWorkspace({ + workspaceId, + workspacePath, + }); + }, + ), + ); + languageDiagnosticsStore.clearProviderDiagnostics(providerId); + } + + return { + providerId: provider.id, + label: provider.label, + description: provider.description, + languageIds: provider.languageIds, + enabled, + }; + } + + 
subscribeToWorkspace( + workspaceId: string, + listener: (payload: { version: number }) => void, + ) { + return languageDiagnosticsStore.subscribe(workspaceId, listener); + } + + async findReferences(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + line: number; + column: number; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) return null; + return (await provider.findReferences?.(args)) ?? null; + } + + async getHover(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + line: number; + column: number; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) return null; + return (await provider.getHover?.(args)) ?? null; + } + + async getDefinition(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + line: number; + column: number; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) return null; + return (await provider.getDefinition?.(args)) ?? null; + } + + async prepareCallHierarchy(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + line: number; + column: number; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) return null; + return (await provider.prepareCallHierarchy?.(args)) ?? 
null; + } + + async getIncomingCalls(args: { + workspaceId: string; + languageId: string; + item: LanguageServiceCallHierarchyItem; + }): Promise { + const provider = this.resolveProvider(args.languageId); + if (!provider || !this.isProviderEnabled(provider.id)) return null; + return ( + (await provider.getIncomingCalls?.({ + workspaceId: args.workspaceId, + item: args.item, + })) ?? null + ); + } + + private isProviderEnabled(providerId: string): boolean { + return this.enabledProviders.get(providerId) ?? false; + } + + private rememberWorkspace(workspaceId: string, workspacePath: string): void { + this.knownWorkspaces.set(workspaceId, workspacePath); + } + + private resolveProvider(languageId: string): LanguageServiceProvider | null { + return ( + this.providers.find((provider) => + provider.supportsLanguage(languageId), + ) ?? null + ); + } +} + +export const languageServiceManager = new LanguageServiceManager(); diff --git a/apps/desktop/src/main/lib/language-services/providers/css/CssLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/css/CssLanguageProvider.ts new file mode 100644 index 00000000000..ab7963edd63 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/css/CssLanguageProvider.ts @@ -0,0 +1,251 @@ +import { + type Diagnostic, + getCSSLanguageService, + getLESSLanguageService, + getSCSSLanguageService, +} from "vscode-css-languageservice"; +import { TextDocument } from "vscode-languageserver-textdocument"; +import { languageDiagnosticsStore } from "../../diagnostics-store"; +import type { + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceProvider, + LanguageServiceProviderSummary, +} from "../../types"; +import { + absolutePathToFileUri, + lspSeverityToLanguageServiceSeverity, + toRelativeWorkspacePath, +} from "../../utils"; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; +}; + +type WorkspaceState = { + documents: Map; + lastError: string | 
null; +}; + +export class CssLanguageProvider implements LanguageServiceProvider { + readonly id = "css"; + + readonly label = "CSS"; + + readonly description = + "CSS, SCSS and LESS diagnostics via vscode-css-languageservice."; + + readonly languageIds = ["css", "scss", "less"]; + + private readonly workspaces = new Map(); + + private readonly cssService = getCSSLanguageService(); + + private readonly scssService = getSCSSLanguageService(); + + private readonly lessService = getLESSLanguageService(); + + supportsLanguage(languageId: string): boolean { + return this.languageIds.includes(languageId); + } + + async openDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState(document.workspaceId); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async changeDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState(document.workspaceId); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + workspaceState.documents.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + if (workspaceState.documents.size === 0) { + this.workspaces.delete(args.workspaceId); + } + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const workspaceState = 
this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + for (const [absolutePath, entry] of workspaceState.documents.entries()) { + await this.validateDocument( + { + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + absolutePath, + languageId: entry.languageId, + content: entry.content, + version: entry.version, + }, + workspaceState, + ); + } + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!workspaceState) { + return { + providerId: this.id, + label: this.label, + status: "idle", + details: null, + documentCount: 0, + }; + } + + return { + providerId: this.id, + label: this.label, + status: workspaceState.lastError ? "error" : "ready", + details: workspaceState.lastError, + documentCount: workspaceState.documents.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + this.workspaces.delete(args.workspaceId); + } + + private getOrCreateWorkspaceState(workspaceId: string): WorkspaceState { + const existing = this.workspaces.get(workspaceId); + if (existing) { + return existing; + } + + const next: WorkspaceState = { + documents: new Map(), + lastError: null, + }; + this.workspaces.set(workspaceId, next); + return next; + } + + private async validateDocument( + document: LanguageServiceDocument, + workspaceState: WorkspaceState, + ): Promise { + try { + const textDocument = TextDocument.create( + absolutePathToFileUri(document.absolutePath), + document.languageId, + document.version, + document.content, + ); + const languageService = this.getLanguageService(document.languageId); + const stylesheet = languageService.parseStylesheet(textDocument); + const 
diagnostics = languageService.doValidation( + textDocument, + stylesheet, + ); + workspaceState.lastError = null; + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + diagnostics.map((diagnostic) => + this.mapDiagnostic( + document.workspacePath, + document.absolutePath, + diagnostic, + ), + ), + ); + } catch (error) { + workspaceState.lastError = + error instanceof Error ? error.message : String(error); + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + [], + ); + } + } + + private getLanguageService(languageId: string) { + switch (languageId) { + case "scss": + return this.scssService; + case "less": + return this.lessService; + default: + return this.cssService; + } + } + + private mapDiagnostic( + workspacePath: string, + absolutePath: string, + diagnostic: Diagnostic, + ): LanguageServiceDiagnostic { + return { + providerId: this.id, + source: diagnostic.source ?? "css", + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: diagnostic.range.start.line + 1, + column: diagnostic.range.start.character + 1, + endLine: diagnostic.range.end.line + 1, + endColumn: diagnostic.range.end.character + 1, + message: diagnostic.message, + code: diagnostic.code ?? 
null, + severity: lspSeverityToLanguageServiceSeverity(diagnostic.severity), + relatedInformation: [], + }; + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/dart/DartLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/dart/DartLanguageProvider.ts new file mode 100644 index 00000000000..37879746501 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/dart/DartLanguageProvider.ts @@ -0,0 +1,599 @@ +import { spawnSync } from "node:child_process"; +import path from "node:path"; +import { languageDiagnosticsStore } from "../../diagnostics-store"; +import { StdioJsonRpcClient } from "../../lsp/StdioJsonRpcClient"; +import type { + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceProvider, + LanguageServiceProviderSummary, + LanguageServiceRelatedInformation, +} from "../../types"; +import { + absolutePathToFileUri, + fileUriToAbsolutePath, + lspSeverityToLanguageServiceSeverity, + offsetToLspPosition, + toRelativeWorkspacePath, +} from "../../utils"; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; + uri: string; +}; + +type DartDiagnostic = { + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + severity?: number; + code?: string | number; + source?: string; + message: string; + relatedInformation?: Array<{ + location: { + uri: string; + range: { + start: { line: number; character: number }; + end: { line: number; character: number }; + }; + }; + message: string; + }>; +}; + +type WorkspaceSession = { + workspaceId: string; + workspacePath: string; + dartCommand: string; + client: StdioJsonRpcClient; + openDocuments: Map; + lastError: string | null; + textDocumentSyncMode: "full" | "incremental"; +}; + +type ResolvedDartCommand = { + command: string; + shell: boolean; +}; + +const 
SPAWN_TIMEOUT_MS = 10_000; + +function canExecute(command: string, shell: boolean): boolean { + const probe = spawnSync(command, ["--version"], { + stdio: "ignore", + shell, + timeout: SPAWN_TIMEOUT_MS, + }); + return probe.status === 0; +} + +function getEnvCandidateCommands(): string[] { + const executableName = process.platform === "win32" ? "dart.exe" : "dart"; + const wrapperName = process.platform === "win32" ? "dart.bat" : "dart"; + return [ + process.env.DART_SDK + ? path.join(process.env.DART_SDK, "bin", executableName) + : null, + process.env.FLUTTER_ROOT + ? path.join(process.env.FLUTTER_ROOT, "bin", wrapperName) + : null, + process.env.FLUTTER_ROOT + ? path.join( + process.env.FLUTTER_ROOT, + "bin", + "cache", + "dart-sdk", + "bin", + executableName, + ) + : null, + ].filter((candidate): candidate is string => Boolean(candidate)); +} + +function resolveFlutterSdkCommands(): string[] { + const flutterCommand = + process.platform === "win32" ? "flutter.bat" : "flutter"; + const locateCommand = process.platform === "win32" ? "where" : "which"; + const locateResult = spawnSync(locateCommand, [flutterCommand], { + encoding: "utf8", + shell: process.platform === "win32", + timeout: SPAWN_TIMEOUT_MS, + }); + if (locateResult.status !== 0 || !locateResult.stdout) { + return []; + } + + const flutterExecutablePath = locateResult.stdout + .split(/\r?\n/) + .map((line) => line.trim()) + .find(Boolean); + if (!flutterExecutablePath) { + return []; + } + + const flutterBinDir = path.dirname(flutterExecutablePath); + const executableName = process.platform === "win32" ? "dart.exe" : "dart"; + const wrapperName = process.platform === "win32" ? "dart.bat" : "dart"; + + return [ + path.join(flutterBinDir, wrapperName), + path.join(flutterBinDir, "cache", "dart-sdk", "bin", executableName), + ]; +} + +function resolveDartCommand(): ResolvedDartCommand | null { + const pathCommand = process.platform === "win32" ? 
"dart.bat" : "dart"; + const shell = process.platform === "win32"; + if (canExecute(pathCommand, shell)) { + return { + command: pathCommand, + shell, + }; + } + + for (const candidate of [ + ...getEnvCandidateCommands(), + ...resolveFlutterSdkCommands(), + ]) { + if (!canExecute(candidate, false)) { + continue; + } + + return { + command: candidate, + shell: false, + }; + } + + return null; +} + +function resolveTextDocumentSyncMode(result: unknown): "full" | "incremental" { + const textDocumentSync = ( + result as { + capabilities?: { + textDocumentSync?: + | number + | { + change?: number; + }; + }; + } + )?.capabilities?.textDocumentSync; + + if (typeof textDocumentSync === "number") { + return textDocumentSync === 2 ? "incremental" : "full"; + } + + if ( + textDocumentSync && + typeof textDocumentSync === "object" && + textDocumentSync.change === 2 + ) { + return "incremental"; + } + + return "full"; +} + +export class DartLanguageProvider implements LanguageServiceProvider { + readonly id = "dart"; + + readonly label = "Dart"; + + readonly description = + "Dart and Flutter diagnostics via the Dart language server."; + + readonly languageIds = ["dart"]; + + private readonly sessions = new Map(); + + private readonly pendingSessions = new Map< + string, + Promise + >(); + + private readonly workspaceErrors = new Map(); + + supportsLanguage(languageId: string): boolean { + return languageId === "dart"; + } + + async openDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + const uri = absolutePathToFileUri(document.absolutePath); + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + uri, + }); + await session.client.notify("textDocument/didOpen", { + textDocument: { + uri, + languageId: "dart", + version: document.version, + text: document.content, + }, + }); + } + + 
async changeDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + const previous = session.openDocuments.get(document.absolutePath); + if (!previous) { + await this.openDocument(document); + return; + } + + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + uri: previous.uri, + }); + + await session.client.notify("textDocument/didChange", { + textDocument: { + uri: previous.uri, + version: document.version, + }, + contentChanges: + session.textDocumentSyncMode === "incremental" + ? [ + { + range: { + start: { line: 0, character: 0 }, + end: offsetToLspPosition( + previous.content, + previous.content.length, + ), + }, + text: document.content, + }, + ] + : [ + { + text: document.content, + }, + ], + }); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + const existing = session.openDocuments.get(args.absolutePath); + session.openDocuments.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + if (existing) { + await session.client.notify("textDocument/didClose", { + textDocument: { + uri: existing.uri, + }, + }); + } + + if (session.openDocuments.size === 0) { + await this.disposeWorkspace(args); + } + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + try { + await session.client.request("dart/reanalyze"); + session.lastError = null; + } catch (error) { + session.lastError = + error instanceof Error ? 
error.message : String(error); + this.workspaceErrors.set(args.workspaceId, session.lastError); + } + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const session = this.sessions.get(args.workspaceId); + const lastError = + session?.lastError ?? this.workspaceErrors.get(args.workspaceId) ?? null; + + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!session) { + return { + providerId: this.id, + label: this.label, + status: lastError ? "error" : "idle", + details: lastError, + documentCount: 0, + }; + } + + return { + providerId: this.id, + label: this.label, + status: lastError ? "error" : "ready", + details: lastError, + documentCount: session.openDocuments.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (session) { + await session.client.stop(); + this.sessions.delete(args.workspaceId); + } + + this.workspaceErrors.delete(args.workspaceId); + } + + private async ensureSession( + workspaceId: string, + workspacePath: string, + ): Promise { + const existing = this.sessions.get(workspaceId); + if (existing) { + return existing; + } + + const pending = this.pendingSessions.get(workspaceId); + if (pending) { + return pending; + } + + const promise = this.initSession(workspaceId, workspacePath); + this.pendingSessions.set(workspaceId, promise); + try { + return await promise; + } finally { + this.pendingSessions.delete(workspaceId); + } + } + + private async initSession( + workspaceId: string, + workspacePath: string, + ): Promise { + const resolvedDartCommand = resolveDartCommand(); + if (!resolvedDartCommand) { + const error = + "dart command not found. 
Install Dart or Flutter, or set DART_SDK / FLUTTER_ROOT."; + this.workspaceErrors.set(workspaceId, error); + throw new Error(error); + } + + let session!: WorkspaceSession; + const client = new StdioJsonRpcClient({ + name: `dart:${workspaceId}`, + command: resolvedDartCommand.command, + args: [ + "language-server", + "--client-id", + "superset.desktop", + "--client-version", + "1.4.6", + ], + cwd: workspacePath, + env: process.env, + shell: resolvedDartCommand.shell, + onNotification: (message) => { + this.handleNotification(session, message); + }, + onRequest: async (message) => await this.handleServerRequest(message), + onExit: ({ code, signal }) => { + const error = `dart language-server exited (${code ?? "null"}${signal ? `, ${signal}` : ""})`; + session.lastError = error; + this.workspaceErrors.set(workspaceId, error); + this.sessions.delete(workspaceId); + }, + onStderr: (chunk) => { + console.error("[language-services/dart] stderr", { + workspaceId, + chunk, + }); + }, + }); + + session = { + workspaceId, + workspacePath, + dartCommand: resolvedDartCommand.command, + client, + openDocuments: new Map(), + lastError: null, + textDocumentSyncMode: "full", + }; + + try { + await client.start(); + const workspaceUri = absolutePathToFileUri(workspacePath); + const initializeResult = await client.request("initialize", { + processId: process.pid, + clientInfo: { + name: "Superset Desktop", + version: "1.4.6", + }, + rootUri: workspaceUri, + rootPath: workspacePath, + workspaceFolders: [ + { + uri: workspaceUri, + name: path.basename(workspacePath), + }, + ], + capabilities: { + workspace: { + configuration: true, + workspaceFolders: true, + }, + textDocument: { + publishDiagnostics: { + relatedInformation: true, + }, + }, + }, + initializationOptions: { + onlyAnalyzeProjectsWithOpenFiles: true, + }, + }); + await client.notify("initialized", {}); + session.textDocumentSyncMode = + resolveTextDocumentSyncMode(initializeResult); + session.lastError = null; + 
this.workspaceErrors.delete(workspaceId); + this.sessions.set(workspaceId, session); + return session; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + session.lastError = message; + this.workspaceErrors.set(workspaceId, message); + await client.stop(); + throw error; + } + } + + private handleNotification( + session: WorkspaceSession, + message: { + method: string; + params?: unknown; + }, + ): void { + if (message.method !== "textDocument/publishDiagnostics") { + return; + } + + const params = message.params as + | { + uri?: string; + diagnostics?: DartDiagnostic[]; + } + | undefined; + if (!params?.uri) { + return; + } + + const absolutePath = fileUriToAbsolutePath(params.uri); + if (!absolutePath) { + return; + } + + languageDiagnosticsStore.setFileDiagnostics( + session.workspaceId, + this.fileKey(absolutePath), + (params.diagnostics ?? []).map((diagnostic) => + this.mapDiagnostic(session.workspacePath, absolutePath, diagnostic), + ), + ); + } + + private async handleServerRequest(message: { + method: string; + params?: unknown; + }): Promise { + if (message.method !== "workspace/configuration") { + return undefined; + } + + const items = (( + message.params as { items?: Array<{ section?: string | null }> | null } + )?.items ?? []) as Array<{ section?: string | null }>; + return items.map((item) => { + if (item.section === "dart") { + return { + showTodos: false, + }; + } + + return null; + }); + } + + private mapDiagnostic( + workspacePath: string, + absolutePath: string, + diagnostic: DartDiagnostic, + ): LanguageServiceDiagnostic { + const relatedInformation = ( + diagnostic.relatedInformation ?? [] + ).map((item) => { + const relatedAbsolutePath = + fileUriToAbsolutePath(item.location.uri) ?? 
absolutePath; + return { + absolutePath: relatedAbsolutePath, + relativePath: toRelativeWorkspacePath( + workspacePath, + relatedAbsolutePath, + ), + line: item.location.range.start.line + 1, + column: item.location.range.start.character + 1, + endLine: item.location.range.end.line + 1, + endColumn: item.location.range.end.character + 1, + message: item.message, + }; + }); + + return { + providerId: this.id, + source: diagnostic.source ?? "dart", + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: diagnostic.range.start.line + 1, + column: diagnostic.range.start.character + 1, + endLine: diagnostic.range.end.line + 1, + endColumn: diagnostic.range.end.character + 1, + message: diagnostic.message, + code: diagnostic.code ?? null, + severity: lspSeverityToLanguageServiceSeverity(diagnostic.severity), + relatedInformation, + }; + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/dockerfile/DockerfileLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/dockerfile/DockerfileLanguageProvider.ts new file mode 100644 index 00000000000..93623e30d2b --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/dockerfile/DockerfileLanguageProvider.ts @@ -0,0 +1,22 @@ +import { resolveNodePackageBinCommand } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class DockerfileLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "dockerfile", + label: "Dockerfile", + description: + "Dockerfile diagnostics via dockerfile-language-server-nodejs.", + languageIds: ["dockerfile"], + defaultSource: "dockerfile", + resolveServerCommand: async ({ workspacePath }) => + await resolveNodePackageBinCommand({ + packageName: "dockerfile-language-server-nodejs", + binName: 
"docker-langserver", + args: ["--stdio"], + cwd: workspacePath, + }), + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/go/GoLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/go/GoLanguageProvider.ts new file mode 100644 index 00000000000..1ae6c4cb006 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/go/GoLanguageProvider.ts @@ -0,0 +1,23 @@ +import { resolveAvailableExecutable } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class GoLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "go", + label: "Go", + description: "Go diagnostics via gopls.", + languageIds: ["go"], + defaultSource: "gopls", + resolveServerCommand: () => + resolveAvailableExecutable([ + { + command: process.platform === "win32" ? "gopls.exe" : "gopls", + args: ["serve"], + probeArgs: ["version"], + shell: false, + }, + ]), + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/graphql/GraphqlLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/graphql/GraphqlLanguageProvider.ts new file mode 100644 index 00000000000..bbdcc2ec7dd --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/graphql/GraphqlLanguageProvider.ts @@ -0,0 +1,28 @@ +import { resolveNodePackageBinCommand } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class GraphqlLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "graphql", + label: "GraphQL", + description: "GraphQL diagnostics via graphql-language-service-cli.", + languageIds: ["graphql"], + defaultSource: "graphql", + resolveServerCommand: async ({ workspacePath }) => + await resolveNodePackageBinCommand({ + packageName: "graphql-language-service-cli", + binName: "graphql-lsp", + args: 
["server", "-m", "stream"], + cwd: workspacePath, + }), + configuration: { + "graphql-config": { + load: { + legacy: true, + }, + }, + }, + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/html/HtmlLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/html/HtmlLanguageProvider.ts new file mode 100644 index 00000000000..81916bc5f4a --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/html/HtmlLanguageProvider.ts @@ -0,0 +1,21 @@ +import { resolveNodePackageBinCommand } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class HtmlLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "html", + label: "HTML", + description: "HTML diagnostics via vscode-html-language-server.", + languageIds: ["html"], + defaultSource: "html", + resolveServerCommand: async ({ workspacePath }) => + await resolveNodePackageBinCommand({ + packageName: "vscode-langservers-extracted", + binName: "vscode-html-language-server", + args: ["--stdio"], + cwd: workspacePath, + }), + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/json/JsonLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/json/JsonLanguageProvider.ts new file mode 100644 index 00000000000..d8307306d69 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/json/JsonLanguageProvider.ts @@ -0,0 +1,306 @@ +import fs from "node:fs/promises"; +import { + type Diagnostic, + getLanguageService, +} from "vscode-json-languageservice"; +import { TextDocument } from "vscode-languageserver-textdocument"; +import { languageDiagnosticsStore } from "../../diagnostics-store"; +import type { + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceProvider, + LanguageServiceProviderSummary, +} from "../../types"; +import { + absolutePathToFileUri, + fileUriToAbsolutePath, + 
lspSeverityToLanguageServiceSeverity, + toRelativeWorkspacePath, +} from "../../utils"; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; +}; + +type WorkspaceState = { + documents: Map; + lastError: string | null; +}; + +const KNOWN_JSON_SCHEMAS = [ + { + uri: "https://json.schemastore.org/package.json", + fileMatch: ["package.json"], + }, + { + uri: "https://json.schemastore.org/tsconfig.json", + fileMatch: ["tsconfig.json", "tsconfig.*.json"], + }, + { + uri: "https://json.schemastore.org/jsconfig.json", + fileMatch: ["jsconfig.json"], + }, + { + uri: "https://json.schemastore.org/bunfig.json", + fileMatch: ["bunfig.json", "bunfig.*.json"], + }, + { + uri: "https://json.schemastore.org/turbo.json", + fileMatch: ["turbo.json"], + }, +]; + +export class JsonLanguageProvider implements LanguageServiceProvider { + readonly id = "json"; + + readonly label = "JSON"; + + readonly description = + "JSON and JSONC diagnostics via vscode-json-languageservice."; + + readonly languageIds = ["json", "jsonc"]; + + private readonly workspaces = new Map(); + + private readonly jsonService = getLanguageService({ + schemaRequestService: async (uri) => { + if (uri.startsWith("file://")) { + return await fs.readFile(new URL(uri), "utf8"); + } + + const response = await fetch(uri); + if (!response.ok) { + throw new Error(`Failed to load schema: ${uri} (${response.status})`); + } + + return await response.text(); + }, + }); + + constructor() { + this.jsonService.configure({ + validate: true, + allowComments: false, + schemas: KNOWN_JSON_SCHEMAS, + }); + } + + supportsLanguage(languageId: string): boolean { + return languageId === "json" || languageId === "jsonc"; + } + + async openDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState(document.workspaceId); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: 
document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async changeDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState(document.workspaceId); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + workspaceState.documents.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + if (workspaceState.documents.size === 0) { + this.workspaces.delete(args.workspaceId); + } + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + for (const [absolutePath, entry] of workspaceState.documents.entries()) { + await this.validateDocument( + { + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + absolutePath, + languageId: entry.languageId, + content: entry.content, + version: entry.version, + }, + workspaceState, + ); + } + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!workspaceState) { + return { + providerId: this.id, + label: this.label, + status: "idle", + details: null, + documentCount: 0, + }; + } + + return { + 
providerId: this.id, + label: this.label, + status: workspaceState.lastError ? "error" : "ready", + details: workspaceState.lastError, + documentCount: workspaceState.documents.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + this.workspaces.delete(args.workspaceId); + } + + private getOrCreateWorkspaceState(workspaceId: string): WorkspaceState { + const existing = this.workspaces.get(workspaceId); + if (existing) { + return existing; + } + + const next: WorkspaceState = { + documents: new Map(), + lastError: null, + }; + this.workspaces.set(workspaceId, next); + return next; + } + + private async validateDocument( + document: LanguageServiceDocument, + workspaceState: WorkspaceState, + ): Promise { + try { + const textDocument = TextDocument.create( + absolutePathToFileUri(document.absolutePath), + document.languageId, + document.version, + document.content, + ); + const jsonDocument = this.jsonService.parseJSONDocument(textDocument); + const diagnostics = await this.jsonService.doValidation( + textDocument, + jsonDocument, + document.languageId === "jsonc" + ? { + comments: "ignore", + trailingCommas: "ignore", + schemaRequest: "ignore", + } + : { + comments: "error", + trailingCommas: "error", + schemaRequest: "ignore", + }, + ); + workspaceState.lastError = null; + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + diagnostics.map((diagnostic) => + this.mapDiagnostic( + document.workspacePath, + document.absolutePath, + diagnostic, + ), + ), + ); + } catch (error) { + workspaceState.lastError = + error instanceof Error ? 
error.message : String(error); + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + [], + ); + } + } + + private mapDiagnostic( + workspacePath: string, + absolutePath: string, + diagnostic: Diagnostic, + ): LanguageServiceDiagnostic { + return { + providerId: this.id, + source: diagnostic.source ?? "json", + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: diagnostic.range.start.line + 1, + column: diagnostic.range.start.character + 1, + endLine: diagnostic.range.end.line + 1, + endColumn: diagnostic.range.end.character + 1, + message: diagnostic.message, + code: diagnostic.code ?? null, + severity: lspSeverityToLanguageServiceSeverity(diagnostic.severity), + relatedInformation: + diagnostic.relatedInformation?.map((item) => { + const relatedAbsolutePath = + fileUriToAbsolutePath(item.location.uri) ?? absolutePath; + return { + absolutePath: relatedAbsolutePath, + relativePath: toRelativeWorkspacePath( + workspacePath, + relatedAbsolutePath, + ), + line: item.location.range.start.line + 1, + column: item.location.range.start.character + 1, + endLine: item.location.range.end.line + 1, + endColumn: item.location.range.end.character + 1, + message: item.message, + }; + }) ?? 
[], + }; + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/python/PythonLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/python/PythonLanguageProvider.ts new file mode 100644 index 00000000000..af985fadb49 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/python/PythonLanguageProvider.ts @@ -0,0 +1,33 @@ +import { resolveNodePackageBinCommand } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class PythonLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "python", + label: "Python", + description: "Python diagnostics via Pyright.", + languageIds: ["python"], + defaultSource: "pyright", + resolveServerCommand: async ({ workspacePath }) => + await resolveNodePackageBinCommand({ + packageName: "pyright", + binName: "pyright-langserver", + args: ["--stdio"], + cwd: workspacePath, + }), + configuration: { + python: { + analysis: { + autoSearchPaths: true, + useLibraryCodeForTypes: true, + diagnosticMode: "openFilesOnly", + }, + }, + pyright: { + disableLanguageServices: false, + }, + }, + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/rust/RustLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/rust/RustLanguageProvider.ts new file mode 100644 index 00000000000..a1e9bc18a92 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/rust/RustLanguageProvider.ts @@ -0,0 +1,26 @@ +import { resolveAvailableExecutable } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class RustLanguageProvider extends ExternalLspLanguageProvider { + constructor() { + super({ + id: "rust", + label: "Rust", + description: "Rust diagnostics via 
rust-analyzer.", + languageIds: ["rust"], + defaultSource: "rust-analyzer", + resolveServerCommand: () => + resolveAvailableExecutable([ + { + command: + process.platform === "win32" + ? "rust-analyzer.exe" + : "rust-analyzer", + args: [], + probeArgs: ["--version"], + shell: false, + }, + ]), + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/toml/TomlLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/toml/TomlLanguageProvider.ts new file mode 100644 index 00000000000..4d73136bc66 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/toml/TomlLanguageProvider.ts @@ -0,0 +1,270 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { TextDecoder } from "node:util"; +import { type LintError, Taplo } from "@taplo/lib"; +import { languageDiagnosticsStore } from "../../diagnostics-store"; +import type { + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceProvider, + LanguageServiceProviderSummary, +} from "../../types"; +import { offsetToLineColumn, toRelativeWorkspacePath } from "../../utils"; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; +}; + +type WorkspaceState = { + documents: Map; + taploPromise: Promise; + lastError: string | null; +}; + +const decoder = new TextDecoder(); + +function createTaploInstance(workspacePath: string): Promise { + return Taplo.initialize({ + cwd: () => workspacePath, + envVar: (key) => process.env[key] ?? 
"", + envVars: () => + Object.entries(process.env).filter( + (entry): entry is [string, string] => typeof entry[1] === "string", + ), + findConfigFile: () => undefined, + glob: () => [], + isAbsolute: (candidate) => path.isAbsolute(candidate), + now: () => new Date(), + readFile: async (target) => await fs.readFile(target), + writeFile: async () => { + throw new Error("Taplo writeFile is not implemented"); + }, + stderr: async (chunk) => { + console.error( + "[language-services/toml] taplo stderr", + decoder.decode(chunk), + ); + return chunk.length; + }, + stdErrAtty: () => false, + stdin: async () => { + throw new Error("Taplo stdin is not implemented"); + }, + stdout: async (chunk) => chunk.length, + urlToFilePath: (uri) => fileURLToPath(uri), + }); +} + +export class TomlLanguageProvider implements LanguageServiceProvider { + readonly id = "toml"; + + readonly label = "TOML"; + + readonly description = "TOML diagnostics via Taplo."; + + readonly languageIds = ["toml"]; + + private readonly workspaces = new Map(); + + supportsLanguage(languageId: string): boolean { + return languageId === "toml"; + } + + async openDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState( + document.workspaceId, + document.workspacePath, + ); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async changeDocument(document: LanguageServiceDocument): Promise { + const workspaceState = this.getOrCreateWorkspaceState( + document.workspaceId, + document.workspacePath, + ); + workspaceState.documents.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.validateDocument(document, workspaceState); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + 
absolutePath: string; + languageId: string; + }): Promise { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + workspaceState.documents.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + if (workspaceState.documents.size === 0) { + this.workspaces.delete(args.workspaceId); + } + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!workspaceState) { + return; + } + + for (const [absolutePath, entry] of workspaceState.documents.entries()) { + await this.validateDocument( + { + workspaceId: args.workspaceId, + workspacePath: args.workspacePath, + absolutePath, + languageId: entry.languageId, + content: entry.content, + version: entry.version, + }, + workspaceState, + ); + } + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const workspaceState = this.workspaces.get(args.workspaceId); + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!workspaceState) { + return { + providerId: this.id, + label: this.label, + status: "idle", + details: null, + documentCount: 0, + }; + } + + return { + providerId: this.id, + label: this.label, + status: workspaceState.lastError ? 
"error" : "ready", + details: workspaceState.lastError, + documentCount: workspaceState.documents.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + this.workspaces.delete(args.workspaceId); + } + + private getOrCreateWorkspaceState( + workspaceId: string, + workspacePath: string, + ): WorkspaceState { + const existing = this.workspaces.get(workspaceId); + if (existing) { + return existing; + } + + const next: WorkspaceState = { + documents: new Map(), + taploPromise: createTaploInstance(workspacePath), + lastError: null, + }; + this.workspaces.set(workspaceId, next); + return next; + } + + private async validateDocument( + document: LanguageServiceDocument, + workspaceState: WorkspaceState, + ): Promise { + try { + const taplo = await workspaceState.taploPromise; + const result = await taplo.lint(document.content); + workspaceState.lastError = null; + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + result.errors.map((error) => this.mapDiagnostic(document, error)), + ); + } catch (error) { + workspaceState.lastError = + error instanceof Error ? error.message : String(error); + languageDiagnosticsStore.setFileDiagnostics( + document.workspaceId, + this.fileKey(document.absolutePath), + [], + ); + } + } + + private mapDiagnostic( + document: LanguageServiceDocument, + error: LintError, + ): LanguageServiceDiagnostic { + const byteRange = error.range as + | { + start?: number; + end?: number; + } + | undefined; + const start = offsetToLineColumn( + document.content, + byteRange?.start ?? null, + ); + const end = offsetToLineColumn(document.content, byteRange?.end ?? 
null); + + return { + providerId: this.id, + source: "toml", + absolutePath: document.absolutePath, + relativePath: toRelativeWorkspacePath( + document.workspacePath, + document.absolutePath, + ), + line: start.line, + column: start.column, + endLine: end.line, + endColumn: end.column, + message: error.error, + code: null, + severity: "error", + relatedInformation: [], + }; + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/typescript/TypeScriptLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/typescript/TypeScriptLanguageProvider.ts new file mode 100644 index 00000000000..c0a620653e0 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/typescript/TypeScriptLanguageProvider.ts @@ -0,0 +1,1115 @@ +import { type ChildProcessWithoutNullStreams, spawn } from "node:child_process"; +import fs from "node:fs"; +import { createRequire } from "node:module"; +import path from "node:path"; +import { SessionDisposedError } from "lib/errors"; +import { resolveShikiLanguageFromFilePath } from "shared/language-registry"; +import { languageDiagnosticsStore } from "../../diagnostics-store"; +import type { + LanguageServiceCallHierarchyItem, + LanguageServiceDiagnostic, + LanguageServiceDocument, + LanguageServiceHover, + LanguageServiceIncomingCall, + LanguageServiceLocation, + LanguageServiceMarkupContent, + LanguageServiceProvider, + LanguageServiceProviderSummary, + LanguageServiceRange, + LanguageServiceRelatedInformation, + LanguageServiceSeverity, +} from "../../types"; + +const require = createRequire(import.meta.url); + +type TsServerRequest = { + seq: number; + type: "request"; + command: string; + arguments?: unknown; +}; + +type TsServerEvent = { + type: "event"; + event: string; + body?: unknown; +}; + +type TsServerResponse = { + type: "response"; + request_seq: number; + success: boolean; + command: 
string; + body?: unknown; + message?: string; +}; + +type TsServerMessage = TsServerEvent | TsServerResponse; + +type TsServerDiagnostic = { + start?: { line: number; offset: number }; + end?: { line: number; offset: number }; + text?: string; + message?: string; + code?: number; + category?: string; + relatedInformation?: Array<{ + span?: { + file?: string; + start?: { line: number; offset: number }; + end?: { line: number; offset: number }; + }; + message?: string; + text?: string; + }>; +}; + +type DiagnosticBucketKey = "syntax" | "semantic" | "suggestion" | "config"; + +type FileDiagnosticBuckets = { + syntax: LanguageServiceDiagnostic[]; + semantic: LanguageServiceDiagnostic[]; + suggestion: LanguageServiceDiagnostic[]; + config: LanguageServiceDiagnostic[]; +}; + +type OpenDocumentEntry = { + languageId: string; + version: number; + content: string; +}; + +type TsServerTextPart = + | string + | { + text?: string; + }; + +type TsServerFileSpan = { + file: string; + start: { line: number; offset: number }; + end: { line: number; offset: number }; +}; + +type TsServerQuickInfoResponse = { + displayString?: string; + documentation?: TsServerTextPart[] | string; + tags?: Array<{ + name?: string; + text?: TsServerTextPart[] | string; + }>; + start?: { line: number; offset: number }; + end?: { line: number; offset: number }; +}; + +type WorkspaceSession = { + workspaceId: string; + workspacePath: string; + tsserverPath: string; + process: ChildProcessWithoutNullStreams; + seq: number; + buffer: string; + requestResolvers: Map< + number, + { + resolve: (value: TsServerResponse) => void; + reject: (error: Error) => void; + } + >; + openDocuments: Map; + diagnosticBuckets: Map; + getErrTimer: ReturnType | null; + lastError: string | null; +}; + +function createEmptyBuckets(): FileDiagnosticBuckets { + return { + syntax: [], + semantic: [], + suggestion: [], + config: [], + }; +} + +function tryConsumeContentLengthMessage( + buffer: string, +): { body: string; rest: 
string } | null { + const normalizedBuffer = buffer.replace(/^(?:\r?\n)+/, ""); + if (normalizedBuffer !== buffer) { + return tryConsumeContentLengthMessage(normalizedBuffer); + } + + const separatorIndex = buffer.indexOf("\r\n\r\n"); + if (separatorIndex === -1) { + return null; + } + + const header = buffer.slice(0, separatorIndex); + const contentLengthMatch = /Content-Length: (\d+)/i.exec(header); + if (!contentLengthMatch) { + return null; + } + + const contentLength = Number(contentLengthMatch[1]); + const bodyStart = separatorIndex + 4; + const bodyEnd = bodyStart + contentLength; + if (buffer.length < bodyEnd) { + return null; + } + + return { + body: buffer.slice(bodyStart, bodyEnd), + rest: buffer.slice(bodyEnd), + }; +} + +function tryConsumeLineMessage( + buffer: string, +): { body: string; rest: string } | null { + const normalizedBuffer = buffer.replace(/^(?:\r?\n)+/, ""); + if (normalizedBuffer !== buffer) { + return tryConsumeLineMessage(normalizedBuffer); + } + + if (!normalizedBuffer.trimStart().startsWith("{")) { + return null; + } + + const newlineIndex = buffer.indexOf("\n"); + if (newlineIndex === -1) { + return null; + } + + return { + body: buffer.slice(0, newlineIndex).trim(), + rest: buffer.slice(newlineIndex + 1), + }; +} + +function toRelativeWorkspacePath( + workspacePath: string, + absolutePath: string, +): string | null { + const relativePath = path.relative(workspacePath, absolutePath); + if ( + !relativePath || + relativePath.startsWith("..") || + path.isAbsolute(relativePath) + ) { + return null; + } + + return relativePath.split(path.sep).join("/"); +} + +function toSeverity(category: string | undefined): LanguageServiceSeverity { + switch (category) { + case "error": + return "error"; + case "warning": + return "warning"; + case "suggestion": + return "hint"; + default: + return "info"; + } +} + +function resolveBundledTsServerPath(): string { + return require.resolve("typescript/lib/tsserver.js"); +} + +function 
resolveWorkspaceTsServerPath(workspacePath: string): string | null { + const candidate = path.join( + workspacePath, + "node_modules", + "typescript", + "lib", + "tsserver.js", + ); + return fs.existsSync(candidate) ? candidate : null; +} + +function computeEndPosition(content: string): { + endLine: number; + endOffset: number; +} { + const lines = content.split(/\r\n|\r|\n/); + return { + endLine: lines.length, + endOffset: (lines.at(-1)?.length ?? 0) + 1, + }; +} + +function normalizeTsTextParts( + parts: TsServerTextPart[] | string | undefined, +): string { + if (!parts) { + return ""; + } + + if (typeof parts === "string") { + return parts; + } + + return parts + .map((part) => (typeof part === "string" ? part : (part.text ?? ""))) + .join(""); +} + +function buildMarkdownCodeFence(code: string, language: string): string { + const fence = code.includes("```") ? "````" : "```"; + return `${fence}${language}\n${code}\n${fence}`; +} + +function formatTsTagMarkdown(tag: { + name?: string; + text?: TsServerTextPart[] | string; +}): string | null { + const tagName = tag.name?.trim(); + const tagBody = normalizeTsTextParts(tag.text).trim(); + + if (!tagName && !tagBody) { + return null; + } + + if (!tagName) { + return tagBody; + } + + if (!tagBody) { + return `**@${tagName}**`; + } + + return `**@${tagName}** ${tagBody}`; +} + +function normalizeTsHoverContents( + body: TsServerQuickInfoResponse | undefined, + absolutePath: string, +): LanguageServiceMarkupContent[] { + if (!body) { + return []; + } + + const contents: LanguageServiceMarkupContent[] = []; + const signature = body.displayString?.trim(); + const documentation = normalizeTsTextParts(body.documentation).trim(); + const tagsMarkdown = (body.tags ?? 
[]) + .map((tag) => formatTsTagMarkdown(tag)) + .filter((tag): tag is string => Boolean(tag)) + .join("\n\n"); + + const codeLanguage = resolveShikiLanguageFromFilePath(absolutePath); + if (signature) { + contents.push({ + kind: "markdown", + value: buildMarkdownCodeFence(signature, codeLanguage ?? "typescript"), + }); + } + + if (documentation) { + contents.push({ + kind: "markdown", + value: documentation, + }); + } + + if (tagsMarkdown) { + contents.push({ + kind: "markdown", + value: tagsMarkdown, + }); + } + + if (contents.length === 0) { + return []; + } + + return contents; +} + +function normalizeTsRange( + start: { line: number; offset: number } | undefined, + end: { line: number; offset: number } | undefined, +): LanguageServiceRange | null { + if (!start || !end) { + return null; + } + + return { + line: start.line, + column: start.offset, + endLine: end.line, + endColumn: end.offset, + }; +} + +function normalizeTsFileSpans(body: unknown): TsServerFileSpan[] { + if (Array.isArray(body)) { + return body as TsServerFileSpan[]; + } + + if (!body || typeof body !== "object") { + return []; + } + + const candidate = body as { + definitions?: TsServerFileSpan[]; + body?: TsServerFileSpan[]; + }; + return candidate.definitions ?? candidate.body ?? 
[]; +} + +export class TypeScriptLanguageProvider implements LanguageServiceProvider { + readonly id = "typescript"; + + readonly label = "TypeScript"; + + readonly description = + "TypeScript, JavaScript, TSX, JSX diagnostics via tsserver."; + + readonly languageIds = [ + "typescript", + "typescriptreact", + "javascript", + "javascriptreact", + ]; + + private readonly sessions = new Map(); + + supportsLanguage(languageId: string): boolean { + return [ + "typescript", + "typescriptreact", + "javascript", + "javascriptreact", + ].includes(languageId); + } + + async openDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + await this.sendRequest(session, "open", { + file: document.absolutePath, + fileContent: document.content, + projectRootPath: document.workspacePath, + }); + this.scheduleGetErr(session); + } + + async changeDocument(document: LanguageServiceDocument): Promise { + const session = await this.ensureSession( + document.workspaceId, + document.workspacePath, + ); + const previous = session.openDocuments.get(document.absolutePath); + if (!previous) { + await this.openDocument(document); + return; + } + + session.openDocuments.set(document.absolutePath, { + languageId: document.languageId, + version: document.version, + content: document.content, + }); + + await this.sendRequest(session, "change", { + file: document.absolutePath, + line: 1, + offset: 1, + ...computeEndPosition(previous.content), + insertString: document.content, + }); + this.scheduleGetErr(session); + } + + async closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + 
session.openDocuments.delete(args.absolutePath); + session.diagnosticBuckets.delete(args.absolutePath); + languageDiagnosticsStore.clearFileDiagnostics( + args.workspaceId, + this.fileKey(args.absolutePath), + ); + + try { + await this.sendRequest(session, "close", { + file: args.absolutePath, + }); + } catch (error) { + console.error("[language-services/typescript] Failed to close document", { + workspaceId: args.workspaceId, + absolutePath: args.absolutePath, + error, + }); + } + + if (session.openDocuments.size === 0) { + await this.disposeWorkspace(args); + return; + } + + this.scheduleGetErr(session); + } + + async refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session || session.openDocuments.size === 0) { + return; + } + + this.scheduleGetErr(session, 0); + } + + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary { + const session = this.sessions.get(args.workspaceId); + if (!args.enabled) { + return { + providerId: this.id, + label: this.label, + status: "disabled", + details: null, + documentCount: 0, + }; + } + + if (!session) { + return { + providerId: this.id, + label: this.label, + status: "idle", + details: null, + documentCount: 0, + }; + } + + return { + providerId: this.id, + label: this.label, + status: session.lastError ? 
"error" : "ready", + details: session.lastError, + documentCount: session.openDocuments.size, + }; + } + + async disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) { + return; + } + + if (session.getErrTimer) { + clearTimeout(session.getErrTimer); + session.getErrTimer = null; + } + + for (const request of session.requestResolvers.values()) { + request.reject(new SessionDisposedError()); + } + session.requestResolvers.clear(); + + session.process.removeAllListeners(); + if (!session.process.killed) { + session.process.kill(); + } + + this.sessions.delete(args.workspaceId); + } + + async findReferences(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const response = await this.sendRequest(session, "references", { + file: args.absolutePath, + line: args.line, + offset: args.column, + }); + + const refs = response.body as + | { + refs?: Array<{ + file: string; + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }>; + } + | undefined; + + if (!refs?.refs) return null; + + return refs.refs.map((ref) => ({ + absolutePath: ref.file, + line: ref.start.line, + column: ref.start.offset, + endLine: ref.end.line, + endColumn: ref.end.offset, + })); + } catch { + return null; + } + } + + async getHover(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const response = await this.sendRequest(session, "quickinfo", { + file: args.absolutePath, + line: args.line, + offset: args.column, + }); + + const body = response.body as TsServerQuickInfoResponse | undefined; + const contents = 
normalizeTsHoverContents(body, args.absolutePath); + if (contents.length === 0) { + return null; + } + + session.lastError = null; + return { + contents, + range: normalizeTsRange(body?.start, body?.end), + }; + } catch { + return null; + } + } + + async getDefinition(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const response = await this.sendRequest(session, "definition", { + file: args.absolutePath, + line: args.line, + offset: args.column, + }); + + const definitions = normalizeTsFileSpans(response.body); + if (definitions.length === 0) { + return null; + } + + session.lastError = null; + return definitions.map((definition) => ({ + absolutePath: definition.file, + line: definition.start.line, + column: definition.start.offset, + endLine: definition.end.line, + endColumn: definition.end.offset, + })); + } catch { + return null; + } + } + + async prepareCallHierarchy(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const response = await this.sendRequest(session, "prepareCallHierarchy", { + file: args.absolutePath, + line: args.line, + offset: args.column, + }); + + const items = response.body as + | Array<{ + name: string; + kind: string; + file: string; + span: { + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }; + selectionSpan: { + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }; + }> + | undefined; + + if (!items) return null; + + return items.map((item) => ({ + name: item.name, + kind: item.kind, + absolutePath: item.file, + line: item.span.start.line, + column: item.span.start.offset, + endLine: item.span.end.line, + endColumn: 
item.span.end.offset, + selectionLine: item.selectionSpan.start.line, + selectionColumn: item.selectionSpan.start.offset, + selectionEndLine: item.selectionSpan.end.line, + selectionEndColumn: item.selectionSpan.end.offset, + })); + } catch { + return null; + } + } + + async getIncomingCalls(args: { + workspaceId: string; + item: LanguageServiceCallHierarchyItem; + }): Promise { + const session = this.sessions.get(args.workspaceId); + if (!session) return null; + + try { + const response = await this.sendRequest( + session, + "provideCallHierarchyIncomingCalls", + { + file: args.item.absolutePath, + line: args.item.selectionLine, + offset: args.item.selectionColumn, + }, + ); + + const calls = response.body as + | Array<{ + from: { + name: string; + kind: string; + file: string; + span: { + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }; + selectionSpan: { + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }; + }; + fromSpans: Array<{ + start: { line: number; offset: number }; + end: { line: number; offset: number }; + }>; + }> + | undefined; + + if (!calls) return null; + + return calls.map((call) => ({ + from: { + name: call.from.name, + kind: call.from.kind, + absolutePath: call.from.file, + line: call.from.span.start.line, + column: call.from.span.start.offset, + endLine: call.from.span.end.line, + endColumn: call.from.span.end.offset, + selectionLine: call.from.selectionSpan.start.line, + selectionColumn: call.from.selectionSpan.start.offset, + selectionEndLine: call.from.selectionSpan.end.line, + selectionEndColumn: call.from.selectionSpan.end.offset, + }, + fromRanges: call.fromSpans.map((span) => ({ + line: span.start.line, + column: span.start.offset, + endLine: span.end.line, + endColumn: span.end.offset, + })), + })); + } catch { + return null; + } + } + + private async ensureSession( + workspaceId: string, + workspacePath: string, + ): Promise { + const existing = 
this.sessions.get(workspaceId); + if (existing) { + return existing; + } + + const tsserverPath = + resolveWorkspaceTsServerPath(workspacePath) ?? + resolveBundledTsServerPath(); + const child = spawn(process.execPath, [tsserverPath, "--stdio"], { + env: { + ...process.env, + ELECTRON_RUN_AS_NODE: "1", + }, + stdio: ["pipe", "pipe", "pipe"], + }); + + const session: WorkspaceSession = { + workspaceId, + workspacePath, + tsserverPath, + process: child, + seq: 0, + buffer: "", + requestResolvers: new Map(), + openDocuments: new Map(), + diagnosticBuckets: new Map(), + getErrTimer: null, + lastError: null, + }; + let isSessionClosed = false; + const closeSession = (message: string) => { + if (isSessionClosed) { + return; + } + isSessionClosed = true; + session.lastError = message; + if (session.getErrTimer) { + clearTimeout(session.getErrTimer); + session.getErrTimer = null; + } + for (const request of session.requestResolvers.values()) { + request.reject(new Error(message)); + } + session.requestResolvers.clear(); + this.sessions.delete(workspaceId); + }; + + child.stdout.setEncoding("utf8"); + child.stdout.on("data", (chunk: string) => { + this.handleStdout(session, chunk); + }); + child.stderr.setEncoding("utf8"); + child.stderr.on("data", (chunk: string) => { + console.error("[language-services/typescript] tsserver stderr", { + workspaceId, + chunk, + }); + }); + child.on("error", (error) => { + console.error("[language-services/typescript] tsserver process error", { + workspaceId, + error, + }); + closeSession( + error instanceof Error + ? `tsserver process error: ${error.message}` + : "tsserver process error", + ); + }); + child.on("exit", (code, signal) => { + closeSession( + `TypeScript server exited: ${code ?? "null"}${signal ? 
` ${signal}` : ""}`, + ); + }); + + this.sessions.set(workspaceId, session); + await this.sendRequest(session, "configure", { + preferences: { + includeCompletionsForModuleExports: true, + includeCompletionsWithInsertText: true, + }, + }); + return session; + } + + private handleStdout(session: WorkspaceSession, chunk: string): void { + session.buffer += chunk; + while (true) { + const framedMessage = tryConsumeContentLengthMessage(session.buffer); + const lineMessage = + framedMessage === null ? tryConsumeLineMessage(session.buffer) : null; + const message = framedMessage ?? lineMessage; + if (!message) { + return; + } + + session.buffer = message.rest; + const body = message.body.trim(); + if (!body) { + continue; + } + + try { + const message = JSON.parse(body) as TsServerMessage; + this.handleMessage(session, message); + } catch (error) { + console.error( + "[language-services/typescript] Failed to parse tsserver payload", + { + workspaceId: session.workspaceId, + error, + body, + }, + ); + } + } + } + + private handleMessage( + session: WorkspaceSession, + message: TsServerMessage, + ): void { + if (message.type === "response") { + const resolver = session.requestResolvers.get(message.request_seq); + if (!resolver) { + return; + } + session.requestResolvers.delete(message.request_seq); + if (message.success) { + session.lastError = null; + resolver.resolve(message); + } else { + const error = new Error( + message.message ?? 
`tsserver command failed: ${message.command}`, + ); + session.lastError = error.message; + resolver.reject(error); + } + return; + } + + switch (message.event) { + case "syntaxDiag": + this.applyDiagnosticsEvent(session, "syntax", message.body); + return; + case "semanticDiag": + this.applyDiagnosticsEvent(session, "semantic", message.body); + return; + case "suggestionDiag": + this.applyDiagnosticsEvent(session, "suggestion", message.body); + return; + case "configFileDiag": + this.applyConfigDiagnosticsEvent(session, message.body); + return; + default: + return; + } + } + + private applyDiagnosticsEvent( + session: WorkspaceSession, + bucketKey: DiagnosticBucketKey, + body: unknown, + ): void { + const payload = body as + | { file?: string; diagnostics?: TsServerDiagnostic[] } + | undefined; + if (!payload?.file) { + return; + } + + const absolutePath = payload.file; + const buckets = + session.diagnosticBuckets.get(absolutePath) ?? createEmptyBuckets(); + buckets[bucketKey] = (payload.diagnostics ?? []).map((diagnostic) => + this.mapDiagnostic(session.workspacePath, absolutePath, diagnostic), + ); + session.diagnosticBuckets.set(absolutePath, buckets); + this.publishDiagnostics(session, absolutePath, buckets); + } + + private applyConfigDiagnosticsEvent( + session: WorkspaceSession, + body: unknown, + ): void { + const payload = body as + | { + triggerFile?: string; + configFile?: string; + diagnostics?: TsServerDiagnostic[]; + } + | undefined; + const absolutePath = payload?.configFile ?? payload?.triggerFile; + if (!absolutePath) { + return; + } + if (!payload) { + return; + } + + const buckets = + session.diagnosticBuckets.get(absolutePath) ?? createEmptyBuckets(); + buckets.config = (payload.diagnostics ?? 
[]).map((diagnostic) => + this.mapDiagnostic(session.workspacePath, absolutePath, diagnostic), + ); + session.diagnosticBuckets.set(absolutePath, buckets); + this.publishDiagnostics(session, absolutePath, buckets); + } + + private publishDiagnostics( + session: WorkspaceSession, + absolutePath: string, + buckets: FileDiagnosticBuckets, + ): void { + const diagnostics = [ + ...buckets.syntax, + ...buckets.semantic, + ...buckets.suggestion, + ...buckets.config, + ]; + languageDiagnosticsStore.setFileDiagnostics( + session.workspaceId, + this.fileKey(absolutePath), + diagnostics, + ); + } + + private mapDiagnostic( + workspacePath: string, + absolutePath: string, + diagnostic: TsServerDiagnostic, + ): LanguageServiceDiagnostic { + const relatedInformation = diagnostic.relatedInformation + ?.map((item) => + this.mapRelatedInformation(workspacePath, absolutePath, item), + ) + .filter( + (item): item is LanguageServiceRelatedInformation => item !== null, + ); + + return { + providerId: this.id, + source: "typescript", + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: diagnostic.start?.line ?? null, + column: diagnostic.start?.offset ?? null, + endLine: diagnostic.end?.line ?? null, + endColumn: diagnostic.end?.offset ?? null, + message: + diagnostic.text ?? diagnostic.message ?? "Unknown TypeScript error", + code: diagnostic.code ?? null, + severity: toSeverity(diagnostic.category), + relatedInformation, + }; + } + + private mapRelatedInformation( + workspacePath: string, + fallbackAbsolutePath: string, + item: NonNullable[number], + ): LanguageServiceRelatedInformation | null { + const absolutePath = item.span?.file ?? fallbackAbsolutePath; + const message = item.text ?? item.message ?? ""; + if (!message) { + return null; + } + + return { + absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, absolutePath), + line: item.span?.start?.line ?? null, + column: item.span?.start?.offset ?? 
null, + endLine: item.span?.end?.line ?? null, + endColumn: item.span?.end?.offset ?? null, + message, + }; + } + + private scheduleGetErr(session: WorkspaceSession, delay = 150): void { + if (session.getErrTimer) { + clearTimeout(session.getErrTimer); + } + + session.getErrTimer = setTimeout(() => { + session.getErrTimer = null; + if (session.openDocuments.size === 0) { + return; + } + + void this.sendRequest(session, "geterr", { + files: Array.from(session.openDocuments.keys()), + delay: 0, + }).catch((error) => { + session.lastError = + error instanceof Error ? error.message : String(error); + console.error("[language-services/typescript] geterr failed", { + workspaceId: session.workspaceId, + error, + }); + }); + }, delay); + } + + private async sendRequest( + session: WorkspaceSession, + command: string, + args?: unknown, + ): Promise { + const seq = ++session.seq; + const payload: TsServerRequest = { + seq, + type: "request", + command, + arguments: args, + }; + const content = `${JSON.stringify(payload)}\n`; + + return await new Promise((resolve, reject) => { + session.requestResolvers.set(seq, { resolve, reject }); + session.process.stdin.write(content, "utf8", (error) => { + if (!error) { + return; + } + + session.requestResolvers.delete(seq); + reject(error); + }); + }); + } + + private fileKey(absolutePath: string): string { + return `${this.id}::${absolutePath}`; + } +} diff --git a/apps/desktop/src/main/lib/language-services/providers/yaml/YamlLanguageProvider.ts b/apps/desktop/src/main/lib/language-services/providers/yaml/YamlLanguageProvider.ts new file mode 100644 index 00000000000..98080c74afe --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/providers/yaml/YamlLanguageProvider.ts @@ -0,0 +1,32 @@ +import { resolveNodePackageBinCommand } from "../../lsp/command-resolvers"; +import { ExternalLspLanguageProvider } from "../../lsp/ExternalLspLanguageProvider"; + +export class YamlLanguageProvider extends ExternalLspLanguageProvider { + 
constructor() { + super({ + id: "yaml", + label: "YAML", + description: "YAML diagnostics via yaml-language-server.", + languageIds: ["yaml"], + defaultSource: "yaml", + resolveServerCommand: async ({ workspacePath }) => + await resolveNodePackageBinCommand({ + packageName: "yaml-language-server", + binName: "yaml-language-server", + args: ["--stdio"], + cwd: workspacePath, + }), + configuration: { + yaml: { + validate: true, + schemaStore: { + enable: true, + url: "https://www.schemastore.org/api/json/catalog.json", + }, + hover: false, + completion: false, + }, + }, + }); + } +} diff --git a/apps/desktop/src/main/lib/language-services/types.ts b/apps/desktop/src/main/lib/language-services/types.ts new file mode 100644 index 00000000000..9c67052776d --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/types.ts @@ -0,0 +1,210 @@ +export type LanguageServiceSeverity = "error" | "warning" | "info" | "hint"; + +export interface LanguageServiceRelatedInformation { + absolutePath: string | null; + relativePath: string | null; + line: number | null; + column: number | null; + endLine: number | null; + endColumn: number | null; + message: string; +} + +export interface LanguageServiceDocument { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + content: string; + version: number; +} + +export interface LanguageServiceDiagnostic { + providerId: string; + source: string; + absolutePath: string | null; + relativePath: string | null; + line: number | null; + column: number | null; + endLine: number | null; + endColumn: number | null; + message: string; + code: string | number | null; + severity: LanguageServiceSeverity; + relatedInformation?: LanguageServiceRelatedInformation[]; +} + +export interface LanguageServiceProviderSummary { + providerId: string; + label: string; + status: "ready" | "disabled" | "idle" | "error"; + details?: string | null; + documentCount: number; +} + +export interface 
LanguageServiceProviderDescriptor { + providerId: string; + label: string; + description: string; + languageIds: string[]; + enabled: boolean; +} + +export interface LanguageServiceWorkspaceSnapshot { + status: "ready"; + workspaceId: string; + workspacePath: string; + providers: LanguageServiceProviderSummary[]; + problems: LanguageServiceDiagnostic[]; + totalCount: number; + truncated: boolean; + summary: { + errorCount: number; + warningCount: number; + infoCount: number; + hintCount: number; + }; +} + +/** + * Location of a symbol reference returned by findReferences / call hierarchy. + */ +export interface LanguageServiceLocation { + absolutePath: string; + line: number; + column: number; + endLine: number; + endColumn: number; +} + +export interface LanguageServiceRange { + line: number; + column: number; + endLine: number; + endColumn: number; +} + +export interface LanguageServiceMarkupContent { + kind: "plaintext" | "markdown"; + value: string; +} + +export interface LanguageServiceHover { + contents: LanguageServiceMarkupContent[]; + range: LanguageServiceRange | null; +} + +/** + * A call hierarchy item returned by prepareCallHierarchy. + */ +export interface LanguageServiceCallHierarchyItem { + name: string; + kind: string; + absolutePath: string; + line: number; + column: number; + endLine: number; + endColumn: number; + selectionLine: number; + selectionColumn: number; + selectionEndLine: number; + selectionEndColumn: number; +} + +/** + * An incoming call hierarchy entry. 
+ */ +export interface LanguageServiceIncomingCall { + from: LanguageServiceCallHierarchyItem; + fromRanges: Array<{ + line: number; + column: number; + endLine: number; + endColumn: number; + }>; +} + +export interface LanguageServiceProvider { + readonly id: string; + readonly label: string; + readonly description: string; + readonly languageIds: string[]; + supportsLanguage(languageId: string): boolean; + openDocument(document: LanguageServiceDocument): Promise; + changeDocument(document: LanguageServiceDocument): Promise; + closeDocument(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + }): Promise; + refreshWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise; + getWorkspaceSummary(args: { + workspaceId: string; + workspacePath: string; + enabled: boolean; + }): LanguageServiceProviderSummary; + disposeWorkspace(args: { + workspaceId: string; + workspacePath: string; + }): Promise; + + /** + * Find all references to a symbol at the given position. + * Returns null if the provider does not support this operation. + */ + findReferences?(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise; + + /** + * Get hover content for a symbol at the given position. + * Returns null if the provider does not support this operation. + */ + getHover?(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise; + + /** + * Get definitions for a symbol at the given position. + * Returns null if the provider does not support this operation. + */ + getDefinition?(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise; + + /** + * Prepare call hierarchy at the given position. + * Returns null if the provider does not support this operation. 
+ */ + prepareCallHierarchy?(args: { + workspaceId: string; + workspacePath: string; + absolutePath: string; + line: number; + column: number; + }): Promise; + + /** + * Get incoming calls for a call hierarchy item. + */ + getIncomingCalls?(args: { + workspaceId: string; + item: LanguageServiceCallHierarchyItem; + }): Promise; +} diff --git a/apps/desktop/src/main/lib/language-services/utils.ts b/apps/desktop/src/main/lib/language-services/utils.ts new file mode 100644 index 00000000000..11db9d2f396 --- /dev/null +++ b/apps/desktop/src/main/lib/language-services/utils.ts @@ -0,0 +1,105 @@ +import path from "node:path"; +import { fileURLToPath, pathToFileURL } from "node:url"; +import type { LanguageServiceSeverity } from "./types"; + +export function toRelativeWorkspacePath( + workspacePath: string, + absolutePath: string, +): string | null { + const relativePath = path.relative(workspacePath, absolutePath); + if ( + !relativePath || + relativePath.startsWith("..") || + path.isAbsolute(relativePath) + ) { + return null; + } + + return relativePath.split(path.sep).join("/"); +} + +export function absolutePathToFileUri(absolutePath: string): string { + return pathToFileURL(absolutePath).toString(); +} + +export function fileUriToAbsolutePath(uri: string): string | null { + if (!uri.startsWith("file://")) { + return null; + } + + try { + return fileURLToPath(uri); + } catch { + return null; + } +} + +export function offsetToLineColumn( + content: string, + offset: number | null | undefined, +): { line: number | null; column: number | null } { + if (offset === null || offset === undefined || Number.isNaN(offset)) { + return { + line: null, + column: null, + }; + } + + const boundedOffset = Math.max(0, Math.min(offset, content.length)); + let line = 1; + let column = 1; + + for (let index = 0; index < boundedOffset; index += 1) { + const char = content[index]; + if (char === "\n") { + line += 1; + column = 1; + continue; + } + + if (char === "\r") { + if (content[index 
+ 1] === "\n") { + index += 1; + } + line += 1; + column = 1; + continue; + } + + column += 1; + } + + return { + line, + column, + }; +} + +export function offsetToLspPosition( + content: string, + offset: number, +): { + line: number; + character: number; +} { + const position = offsetToLineColumn(content, offset); + return { + line: Math.max((position.line ?? 1) - 1, 0), + character: Math.max((position.column ?? 1) - 1, 0), + }; +} + +export function lspSeverityToLanguageServiceSeverity( + severity: number | null | undefined, +): LanguageServiceSeverity { + switch (severity) { + case 1: + return "error"; + case 2: + return "warning"; + case 3: + return "info"; + default: + return "hint"; + } +} diff --git a/apps/desktop/src/main/lib/local-db/index.ts b/apps/desktop/src/main/lib/local-db/index.ts index cf7dc89cab1..7ce15c1850c 100644 --- a/apps/desktop/src/main/lib/local-db/index.ts +++ b/apps/desktop/src/main/lib/local-db/index.ts @@ -6,7 +6,6 @@ import * as schema from "@superset/local-db"; import Database from "better-sqlite3"; import { drizzle } from "drizzle-orm/better-sqlite3"; import { migrate } from "drizzle-orm/better-sqlite3/migrator"; -import { app } from "electron"; import { validate as uuidValidate, version as uuidVersion } from "uuid"; import { env } from "../../env.main"; import { @@ -19,6 +18,20 @@ const DB_PATH = join(SUPERSET_HOME_DIR, "local.db"); ensureSupersetHomeDirExists(); +type ElectronAppLike = Pick< + typeof import("electron").app, + "getAppPath" | "isPackaged" +>; + +function getElectronApp(): ElectronAppLike | null { + try { + const electron = require("electron") as typeof import("electron"); + return electron.app; + } catch { + return null; + } +} + /** * Gets the migrations directory path. 
* @@ -29,12 +42,19 @@ ensureSupersetHomeDirExists(); * - Test environment: Use monorepo path relative to __dirname */ function getMigrationsDirectory(): string { + const electronApp = getElectronApp(); + const packagedResourcesPath = process.resourcesPath + ? join(process.resourcesPath, "resources/migrations") + : null; + if (packagedResourcesPath && existsSync(packagedResourcesPath)) { + return packagedResourcesPath; + } // Check if running in Electron (app.getAppPath exists) const isElectron = - typeof app?.getAppPath === "function" && - typeof app?.isPackaged === "boolean"; + typeof electronApp?.getAppPath === "function" && + typeof electronApp?.isPackaged === "boolean"; - if (isElectron && app.isPackaged) { + if (isElectron && electronApp.isPackaged) { return join(process.resourcesPath, "resources/migrations"); } @@ -42,7 +62,7 @@ function getMigrationsDirectory(): string { if (isElectron && isDev) { // Development: source files in monorepo - return join(app.getAppPath(), "../../packages/local-db/drizzle"); + return join(electronApp.getAppPath(), "../../packages/local-db/drizzle"); } // Preview mode or test: __dirname is dist/main, so go up one level to dist/resources/migrations @@ -63,7 +83,10 @@ function getMigrationsDirectory(): string { // Try Electron app path if available if (isElectron) { - const srcPath = join(app.getAppPath(), "../../packages/local-db/drizzle"); + const srcPath = join( + electronApp.getAppPath(), + "../../packages/local-db/drizzle", + ); if (existsSync(srcPath)) { return srcPath; } @@ -103,4 +126,12 @@ try { console.log("[local-db] Migrations complete"); +export function closeLocalDb(): void { + try { + sqlite.close(); + } catch (error) { + console.error("[local-db] Failed to close database:", error); + } +} + export type LocalDb = typeof localDb; diff --git a/apps/desktop/src/main/lib/menu-events.ts b/apps/desktop/src/main/lib/menu-events.ts index 7798b468b57..aa2dfe21021 100644 --- a/apps/desktop/src/main/lib/menu-events.ts +++ 
b/apps/desktop/src/main/lib/menu-events.ts @@ -1,10 +1,12 @@ import { EventEmitter } from "node:events"; +import type { BrowserShortcutAction } from "shared/browser-shortcuts"; export type SettingsSection = | "project" | "workspace" | "appearance" | "keyboard" | "behavior" + | "diagnostics" | "git" | "terminal" | "integrations"; @@ -17,4 +19,8 @@ export interface OpenWorkspaceEvent { workspaceId: string; } +export interface BrowserActionEvent { + action: BrowserShortcutAction; +} + export const menuEmitter = new EventEmitter(); diff --git a/apps/desktop/src/main/lib/menu.ts b/apps/desktop/src/main/lib/menu.ts index d1915156d46..6deeac1dca1 100644 --- a/apps/desktop/src/main/lib/menu.ts +++ b/apps/desktop/src/main/lib/menu.ts @@ -1,7 +1,8 @@ import { COMPANY } from "@superset/shared/constants"; -import { app, BrowserWindow, Menu, shell } from "electron"; +import { app, BrowserWindow, Menu, shell, webContents } from "electron"; import { env } from "main/env.main"; import { resetTerminalStateDev } from "main/lib/terminal/dev-reset"; +import type { BrowserShortcutAction } from "shared/browser-shortcuts"; import { checkForUpdatesInteractive, simulateDownloading, @@ -10,8 +11,30 @@ import { } from "./auto-updater"; import { menuEmitter } from "./menu-events"; +function getFocusedWebview() { + return webContents + .getAllWebContents() + .find((wc) => wc.getType() === "webview" && wc.isFocused()); +} + +function triggerBrowserShortcut(action: BrowserShortcutAction) { + const focusedGuest = getFocusedWebview(); + if (focusedGuest) { + if (action === "hard-reload") { + focusedGuest.reloadIgnoringCache(); + } else { + focusedGuest.reload(); + } + return; + } + + menuEmitter.emit("browser-action", action); +} + export function createApplicationMenu() { const reloadAccelerator = "CmdOrCtrl+R"; + const browserReloadAccelerator = "CmdOrCtrl+Alt+R"; + const browserHardReloadAccelerator = "CmdOrCtrl+Shift+Alt+R"; const closeAccelerator = "CmdOrCtrl+Shift+Q"; const 
showHotkeysAccelerator = "CmdOrCtrl+/"; const openSettingsAccelerator = "CmdOrCtrl+,"; @@ -49,6 +72,25 @@ export function createApplicationMenu() { { role: "togglefullscreen" }, ], }, + { + label: "Browser", + submenu: [ + { + label: "Reload Browser", + accelerator: browserReloadAccelerator, + click: () => { + triggerBrowserShortcut("reload"); + }, + }, + { + label: "Hard Reload Browser", + accelerator: browserHardReloadAccelerator, + click: () => { + triggerBrowserShortcut("hard-reload"); + }, + }, + ], + }, { label: "Window", submenu: [ diff --git a/apps/desktop/src/main/lib/notification-sound.ts b/apps/desktop/src/main/lib/notification-sound.ts index 9a530aa6daa..cb06ff000d9 100644 --- a/apps/desktop/src/main/lib/notification-sound.ts +++ b/apps/desktop/src/main/lib/notification-sound.ts @@ -53,10 +53,15 @@ function getSelectedRingtonePath(): string | null { /** * Plays the notification sound based on user's selected ringtone. * Uses platform-specific commands to play the audio file. + * + * `onComplete` fires when playback finishes, or immediately when playback + * is skipped (muted / no ringtone). Callers can chain follow-up audio + * (e.g. Aivis TTS) so it plays after the ringtone instead of overlapping. 
*/ -export function playNotificationSound(): void { +export function playNotificationSound(onComplete?: () => void): void { // Check if sounds are muted if (areNotificationSoundsMuted()) { + onComplete?.(); return; } @@ -64,6 +69,7 @@ export function playNotificationSound(): void { // No sound if "none" is selected if (!soundPath) { + onComplete?.(); return; } @@ -84,5 +90,12 @@ export function playNotificationSound(): void { volume = 100; } - playSoundFile(soundPath, volume); + let done = false; + const finish = () => { + if (done) return; + done = true; + onComplete?.(); + }; + const proc = playSoundFile(soundPath, volume, { onComplete: finish }); + if (!proc) finish(); } diff --git a/apps/desktop/src/main/lib/notifications/aivis-tts.ts b/apps/desktop/src/main/lib/notifications/aivis-tts.ts new file mode 100644 index 00000000000..af7f59aee91 --- /dev/null +++ b/apps/desktop/src/main/lib/notifications/aivis-tts.ts @@ -0,0 +1,180 @@ +import { execFile } from "node:child_process"; +import { writeFile } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { join } from "node:path"; +import { settings } from "@superset/local-db"; +import { localDb } from "../local-db"; +import { playSoundFile } from "../play-sound"; + +export type AivisEventKind = "complete" | "permission"; + +export interface AivisPlaceholders { + branch?: string; + workspace?: string; + worktree?: string; + project?: string; + tab?: string; + pane?: string; + event?: string; +} + +const AIVIS_ENDPOINT = "https://api.aivis-project.com/v1/tts/synthesize"; + +export const AIVIS_PLACEHOLDER_KEYS = [ + "branch", + "workspace", + "worktree", + "project", + "tab", + "pane", + "event", +] as const satisfies readonly (keyof AivisPlaceholders)[]; + +export function renderAivisTemplate( + template: string, + vars: AivisPlaceholders, +): string { + return template.replace(/\{\{\s*(\w+)\s*\}\}/g, (_, key: string) => { + const value = vars[key as keyof AivisPlaceholders]; + return value ?? 
""; + }); +} + +function readAivisSettings() { + try { + const row = localDb.select().from(settings).get(); + return { + enabled: row?.aivisEnabled ?? false, + apiKey: row?.aivisApiKey ?? "", + modelUuid: row?.aivisModelUuid ?? "", + userDictionaryUuid: row?.aivisUserDictionaryUuid ?? "", + format: row?.aivisFormat ?? "ワークスペース、{{workspace}}、です", + formatPermission: + row?.aivisFormatPermission ?? "{{branch}}で対応が必要です", + volume: + typeof row?.aivisVolume === "number" && Number.isFinite(row.aivisVolume) + ? Math.max(0, Math.min(100, row.aivisVolume)) + : 100, + speakingRate: + typeof row?.aivisSpeakingRate === "number" && + Number.isFinite(row.aivisSpeakingRate) + ? Math.max(0.5, Math.min(2.0, row.aivisSpeakingRate)) + : 1.0, + }; + } catch { + return null; + } +} + +async function synthesize( + apiKey: string, + modelUuid: string, + text: string, + userDictionaryUuid?: string, + speakingRate?: number, +): Promise { + const body: Record = { + model_uuid: modelUuid, + text, + output_format: "mp3", + }; + if (userDictionaryUuid) body.user_dictionary_uuid = userDictionaryUuid; + if (speakingRate !== undefined) body.speaking_rate = speakingRate; + + const res = await fetch(AIVIS_ENDPOINT, { + method: "POST", + headers: { + Authorization: `Bearer ${apiKey}`, + "Content-Type": "application/json", + Accept: "audio/mpeg", + }, + body: JSON.stringify(body), + }); + + if (!res.ok) { + const body = await res.text().catch(() => ""); + throw new Error( + `Aivis API error: ${res.status} ${res.statusText} ${body.slice(0, 200)}`, + ); + } + + const arrayBuffer = await res.arrayBuffer(); + return Buffer.from(arrayBuffer); +} + +function uniqueTmpPath(): string { + return join( + tmpdir(), + `superset-aivis-${Date.now()}-${Math.random().toString(36).slice(2, 8)}.mp3`, + ); +} + +function cleanup(path: string): void { + execFile("rm", ["-f", path], () => { + /* ignore */ + }); +} + +/** + * Synthesize text via Aivis API and play it. 
+ * Called with explicit apiKey/modelUuid (used by both the test endpoint + * and the runtime notification flow). + */ +export async function playAivisTts(options: { + apiKey: string; + modelUuid: string; + text: string; + volume?: number; + speakingRate?: number; + userDictionaryUuid?: string; +}): Promise { + const trimmed = options.text.trim(); + if (!trimmed) return; + if (!options.apiKey || !options.modelUuid) { + throw new Error("Aivis API key and model UUID are required"); + } + + const audio = await synthesize( + options.apiKey, + options.modelUuid, + trimmed, + options.userDictionaryUuid, + options.speakingRate, + ); + const path = uniqueTmpPath(); + await writeFile(path, audio); + + playSoundFile(path, options.volume ?? 100, { + onComplete: () => cleanup(path), + }); +} + +/** + * Render the configured template for the given event and play it. + * No-op if aivis is disabled, not configured, or the rendered text is empty. + */ +export async function playAivisNotification( + event: AivisEventKind, + vars: AivisPlaceholders, +): Promise { + const cfg = readAivisSettings(); + if (!cfg || !cfg.enabled) return; + if (!cfg.apiKey || !cfg.modelUuid) return; + + const template = event === "permission" ? 
cfg.formatPermission : cfg.format; + const text = renderAivisTemplate(template, vars).trim(); + if (!text) return; + + try { + await playAivisTts({ + apiKey: cfg.apiKey, + modelUuid: cfg.modelUuid, + text, + volume: cfg.volume, + speakingRate: cfg.speakingRate, + userDictionaryUuid: cfg.userDictionaryUuid || undefined, + }); + } catch (err) { + console.warn("[aivis-tts] playback failed", err); + } +} diff --git a/apps/desktop/src/main/lib/notifications/notification-manager.ts b/apps/desktop/src/main/lib/notifications/notification-manager.ts index 3434b9ee84a..06c85791b85 100644 --- a/apps/desktop/src/main/lib/notifications/notification-manager.ts +++ b/apps/desktop/src/main/lib/notifications/notification-manager.ts @@ -21,7 +21,8 @@ export interface NotificationManagerDeps { body: string; silent: boolean; }) => NativeNotification; - playSound: () => void; + playSound: (onComplete?: () => void) => void; + playAivis?: (event: AgentLifecycleEvent) => void; onNotificationClick: (ids: NotificationIds) => void; getVisibilityContext: () => { isFocused: boolean; @@ -64,23 +65,22 @@ export class NotificationManager { const title = this.deps.getNotificationTitle(event); const isPermissionRequest = event.eventType === "PermissionRequest"; - const isPendingQuestion = event.eventType === "PendingQuestion"; const notification = this.deps.createNotification({ - title: - isPermissionRequest || isPendingQuestion - ? `Awaiting Response — ${workspaceName}` - : `Agent Complete — ${workspaceName}`, - body: - isPermissionRequest || isPendingQuestion - ? `"${title}" is waiting for your reply` - : `"${title}" has finished its task`, + title: isPermissionRequest + ? `Awaiting Response — ${workspaceName}` + : `Agent Complete — ${workspaceName}`, + body: isPermissionRequest + ? `"${title}" is waiting for your reply` + : `"${title}" has finished its task`, silent: true, }); const key = event.sessionId ?? event.paneId ?? 
`_anon_${this.counter++}`; this.track(key, notification); - this.deps.playSound(); + // Chain Aivis after the ringtone so the voice announcement plays + // once the notification sound finishes rather than in parallel. + this.deps.playSound(() => this.deps.playAivis?.(event)); notification.on("click", () => { this.deps.onNotificationClick({ diff --git a/apps/desktop/src/main/lib/reference-graph/graph-builder.ts b/apps/desktop/src/main/lib/reference-graph/graph-builder.ts new file mode 100644 index 00000000000..33d84b9f29c --- /dev/null +++ b/apps/desktop/src/main/lib/reference-graph/graph-builder.ts @@ -0,0 +1,366 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { resolveReferenceGraphLanguageId } from "shared/language-registry"; +import { languageServiceManager } from "../language-services/manager"; +import type { + LanguageServiceCallHierarchyItem, + LanguageServiceLocation, +} from "../language-services/types"; +import { toRelativeWorkspacePath } from "../language-services/utils"; +import type { + ReferenceGraphData, + ReferenceGraphEdge, + ReferenceGraphNode, + ReferenceGraphRequest, +} from "./types"; + +const CONTEXT_LINES = 3; + +function makeNodeId(absolutePath: string, line: number, column: number) { + return `${absolutePath}:${line}:${column}`; +} + +function getLanguageIdFromPath(filePath: string): string { + return resolveReferenceGraphLanguageId(filePath); +} + +async function getCodeSnippet( + absolutePath: string, + line: number, + endLine: number, +): Promise<{ snippet: string; startLine: number } | null> { + try { + const content = await fs.readFile(absolutePath, "utf8"); + const lines = content.split("\n"); + const startLine = Math.max(0, line - 1 - CONTEXT_LINES); + const finalLine = Math.min(lines.length, endLine + CONTEXT_LINES); + const snippet = lines.slice(startLine, finalLine).join("\n"); + return { snippet, startLine: startLine + 1 }; + } catch { + return null; + } +} + +/** + * Check if a file path should be 
excluded from the graph. + * Patterns are matched against path segments — e.g. "node_modules" + * matches any path containing a "node_modules" directory segment. + */ +function shouldExclude( + absolutePath: string, + workspacePath: string, + excludePatterns: string[], +): boolean { + const relative = path.relative(workspacePath, absolutePath); + const segments = relative.split(path.sep); + for (const pattern of excludePatterns) { + // Extract the directory name from glob patterns like "**/node_modules/**" + const dirName = pattern.replace(/\*\*\//g, "").replace(/\/\*\*/g, ""); + if (segments.includes(dirName)) { + return true; + } + } + return false; +} + +export async function buildReferenceGraph( + request: ReferenceGraphRequest, +): Promise { + const maxDepth = request.maxDepth ?? 3; + const maxNodes = request.maxNodes ?? 100; + const excludePatterns = request.excludePatterns ?? [ + "**/node_modules/**", + "**/dist/**", + "**/.git/**", + ]; + + const nodes = new Map(); + const edges = new Map(); + + // Try call hierarchy first (works for functions/methods) + const callHierarchyItems = await languageServiceManager.prepareCallHierarchy({ + workspaceId: request.workspaceId, + workspacePath: request.workspacePath, + absolutePath: request.absolutePath, + languageId: request.languageId, + line: request.line, + column: request.column, + }); + + if (callHierarchyItems && callHierarchyItems.length > 0) { + // Build from call hierarchy + const rootItem = callHierarchyItems[0]; + const rootNodeId = makeNodeId( + rootItem.absolutePath, + rootItem.line, + rootItem.column, + ); + await addNodeFromCallHierarchyItem( + nodes, + rootItem, + rootNodeId, + request.workspacePath, + true, + 0, + ); + + await buildCallHierarchyGraph( + request, + rootItem, + rootNodeId, + nodes, + edges, + 1, + maxDepth, + maxNodes, + excludePatterns, + ); + } else { + // Fall back to references + const rootNodeId = makeNodeId( + request.absolutePath, + request.line, + request.column, + ); + const 
snippet = await getCodeSnippet( + request.absolutePath, + request.line, + request.line, + ); + nodes.set(rootNodeId, { + id: rootNodeId, + name: "Symbol", + kind: "unknown", + absolutePath: request.absolutePath, + relativePath: toRelativeWorkspacePath( + request.workspacePath, + request.absolutePath, + ), + line: request.line, + column: request.column, + endLine: request.line, + endColumn: request.column, + codeSnippet: snippet?.snippet ?? "", + languageId: getLanguageIdFromPath(request.absolutePath), + snippetStartLine: snippet?.startLine ?? request.line, + isRoot: true, + depth: 0, + }); + + await buildReferencesGraph( + request, + rootNodeId, + nodes, + edges, + 1, + maxDepth, + maxNodes, + excludePatterns, + ); + } + + return { + nodes: Array.from(nodes.values()), + edges: Array.from(edges.values()), + }; +} + +async function buildCallHierarchyGraph( + request: ReferenceGraphRequest, + item: LanguageServiceCallHierarchyItem, + nodeId: string, + nodes: Map, + edges: Map, + currentDepth: number, + maxDepth: number, + maxNodes: number, + excludePatterns: string[], +): Promise { + if (currentDepth > maxDepth || nodes.size >= maxNodes) return; + + const incomingCalls = await languageServiceManager.getIncomingCalls({ + workspaceId: request.workspaceId, + languageId: request.languageId, + item, + }); + + if (!incomingCalls) return; + + const pendingItems: Array<{ + item: LanguageServiceCallHierarchyItem; + nodeId: string; + }> = []; + + for (const call of incomingCalls) { + if (nodes.size >= maxNodes) break; + if ( + shouldExclude( + call.from.absolutePath, + request.workspacePath, + excludePatterns, + ) + ) + continue; + + const callerNodeId = makeNodeId( + call.from.absolutePath, + call.from.line, + call.from.column, + ); + + if (!nodes.has(callerNodeId)) { + await addNodeFromCallHierarchyItem( + nodes, + call.from, + callerNodeId, + request.workspacePath, + false, + currentDepth, + ); + pendingItems.push({ item: call.from, nodeId: callerNodeId }); + } + + const 
edgeId = `${callerNodeId}->${nodeId}`; + if (!edges.has(edgeId)) { + edges.set(edgeId, { + id: edgeId, + source: callerNodeId, + target: nodeId, + }); + } + } + + // Recurse into callers sequentially to respect maxNodes budget + for (const { item, nodeId: callerId } of pendingItems) { + if (nodes.size >= maxNodes) break; + await buildCallHierarchyGraph( + request, + item, + callerId, + nodes, + edges, + currentDepth + 1, + maxDepth, + maxNodes, + excludePatterns, + ); + } +} + +async function buildReferencesGraph( + request: ReferenceGraphRequest, + rootNodeId: string, + nodes: Map, + edges: Map, + currentDepth: number, + maxDepth: number, + maxNodes: number, + excludePatterns: string[], +): Promise { + if (currentDepth > maxDepth || nodes.size >= maxNodes) return; + + const rootNode = nodes.get(rootNodeId); + if (!rootNode) return; + + const references = await languageServiceManager.findReferences({ + workspaceId: request.workspaceId, + workspacePath: request.workspacePath, + absolutePath: rootNode.absolutePath, + languageId: request.languageId, + line: rootNode.line, + column: rootNode.column, + }); + + if (!references) return; + + for (const ref of references) { + if (nodes.size >= maxNodes) break; + if (shouldExclude(ref.absolutePath, request.workspacePath, excludePatterns)) + continue; + + const refNodeId = makeNodeId(ref.absolutePath, ref.line, ref.column); + + // Skip self-references + if (refNodeId === rootNodeId) continue; + + if (!nodes.has(refNodeId)) { + await addNodeFromLocation( + nodes, + ref, + refNodeId, + request.workspacePath, + currentDepth, + ); + } + + const edgeId = `${rootNodeId}->${refNodeId}`; + if (!edges.has(edgeId)) { + edges.set(edgeId, { + id: edgeId, + source: rootNodeId, + target: refNodeId, + }); + } + } +} + +async function addNodeFromCallHierarchyItem( + nodes: Map, + item: LanguageServiceCallHierarchyItem, + nodeId: string, + workspacePath: string, + isRoot: boolean, + depth: number, +): Promise { + const snippet = await 
getCodeSnippet( + item.absolutePath, + item.line, + item.endLine, + ); + nodes.set(nodeId, { + id: nodeId, + name: item.name, + kind: item.kind, + absolutePath: item.absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, item.absolutePath), + line: item.line, + column: item.column, + endLine: item.endLine, + endColumn: item.endColumn, + codeSnippet: snippet?.snippet ?? "", + languageId: getLanguageIdFromPath(item.absolutePath), + snippetStartLine: snippet?.startLine ?? item.line, + isRoot, + depth, + }); +} + +async function addNodeFromLocation( + nodes: Map, + location: LanguageServiceLocation, + nodeId: string, + workspacePath: string, + depth: number, +): Promise { + const snippet = await getCodeSnippet( + location.absolutePath, + location.line, + location.endLine, + ); + nodes.set(nodeId, { + id: nodeId, + name: path.basename(location.absolutePath), + kind: "reference", + absolutePath: location.absolutePath, + relativePath: toRelativeWorkspacePath(workspacePath, location.absolutePath), + line: location.line, + column: location.column, + endLine: location.endLine, + endColumn: location.endColumn, + codeSnippet: snippet?.snippet ?? "", + languageId: getLanguageIdFromPath(location.absolutePath), + snippetStartLine: snippet?.startLine ?? 
location.line, + isRoot: false, + depth, + }); +} diff --git a/apps/desktop/src/main/lib/reference-graph/index.ts b/apps/desktop/src/main/lib/reference-graph/index.ts new file mode 100644 index 00000000000..7db1186824d --- /dev/null +++ b/apps/desktop/src/main/lib/reference-graph/index.ts @@ -0,0 +1,7 @@ +export { buildReferenceGraph } from "./graph-builder"; +export type { + ReferenceGraphData, + ReferenceGraphEdge, + ReferenceGraphNode, + ReferenceGraphRequest, +} from "./types"; diff --git a/apps/desktop/src/main/lib/reference-graph/types.ts b/apps/desktop/src/main/lib/reference-graph/types.ts new file mode 100644 index 00000000000..488307e66cd --- /dev/null +++ b/apps/desktop/src/main/lib/reference-graph/types.ts @@ -0,0 +1,58 @@ +/** + * Types for the reference graph feature. + * Shared between main process (graph building) and renderer (visualization). + */ + +export interface ReferenceGraphNode { + id: string; + name: string; + /** Symbol kind (function, class, variable, etc.) */ + kind: string; + absolutePath: string; + relativePath: string | null; + line: number; + column: number; + endLine: number; + endColumn: number; + /** Code snippet with context lines */ + codeSnippet: string; + /** Language ID for syntax highlighting */ + languageId: string; + /** Starting line number of the snippet in the file */ + snippetStartLine: number; + /** Whether this is the root node (the queried symbol) */ + isRoot: boolean; + /** Depth in the graph from root */ + depth: number; +} + +export interface ReferenceGraphEdge { + id: string; + source: string; + target: string; +} + +export interface ReferenceGraphData { + nodes: ReferenceGraphNode[]; + edges: ReferenceGraphEdge[]; +} + +export interface ReferenceGraphRequest { + workspaceId: string; + workspacePath: string; + absolutePath: string; + languageId: string; + line: number; + column: number; + /** Max recursion depth (default 3) */ + maxDepth?: number; + /** Max total nodes (default 100) */ + maxNodes?: number; + /** 
+ * Directory name segments to exclude from the graph. + * Glob-style patterns like "** /node_modules/**" are supported — the + * directory name is extracted and matched against path segments. + * Default: ["node_modules", "dist", ".git"] + */ + excludePatterns?: string[]; +} diff --git a/apps/desktop/src/main/lib/report-error.ts b/apps/desktop/src/main/lib/report-error.ts new file mode 100644 index 00000000000..010468e8cb0 --- /dev/null +++ b/apps/desktop/src/main/lib/report-error.ts @@ -0,0 +1,43 @@ +import * as Sentry from "@sentry/electron/main"; + +type Severity = "fatal" | "error" | "warning" | "info" | "debug"; + +interface ReportErrorOptions { + /** Sentry severity level. Defaults to "error". */ + severity?: Severity; + /** Tags for grouping in the Sentry dashboard (e.g. { subsystem: "daemon" }). */ + tags?: Record; + /** Additional context shown alongside the event. */ + context?: Record; + /** Which Sentry "fingerprint" bucket to group into. Override when the default stack-based grouping clumps unrelated issues together. */ + fingerprint?: string[]; +} + +/** + * Reports an error to Sentry from the main process. + * + * Prefer this over raw `Sentry.captureException` for non-tRPC paths, so we get + * consistent severity/tags/context and a single seam to change filtering later. 
+ */ +export function reportError( + error: unknown, + options: ReportErrorOptions = {}, +): void { + const { severity = "error", tags, context, fingerprint } = options; + + Sentry.withScope((scope) => { + scope.setLevel(severity); + if (tags) { + for (const [k, v] of Object.entries(tags)) { + scope.setTag(k, v); + } + } + if (context) { + scope.setContext("details", context); + } + if (fingerprint) { + scope.setFingerprint(fingerprint); + } + Sentry.captureException(error); + }); +} diff --git a/apps/desktop/src/main/lib/service-status/index.ts b/apps/desktop/src/main/lib/service-status/index.ts new file mode 100644 index 00000000000..ee7f82c1c84 --- /dev/null +++ b/apps/desktop/src/main/lib/service-status/index.ts @@ -0,0 +1,246 @@ +import { EventEmitter } from "node:events"; +import { app, net } from "electron"; +import { + createUnknownSnapshot, + indicatorToLevel, + SERVICE_STATUS_DEFINITIONS, + type ServiceStatusDefinition, + type ServiceStatusId, + type ServiceStatusSnapshot, + type StatuspageIndicator, +} from "shared/service-status-types"; + +const POLL_INTERVAL_MS = 5 * 60 * 1000; +const REQUEST_TIMEOUT_MS = 10_000; +// Focus-driven refresh is debounced: if the last successful refresh attempt +// was within this window we skip rather than hammering the API on every +// window/tab switch. +const FOCUS_REFRESH_MIN_INTERVAL_MS = 30_000; + +type StatuspageResponse = { + status?: { indicator?: StatuspageIndicator; description?: string }; +}; + +class ServiceStatusService extends EventEmitter { + private snapshots = new Map(); + private pollTimer: ReturnType | null = null; + private started = false; + private lastRefreshAt = 0; + // Re-entry guard: ensures start()'s initial refresh and a concurrent + // focus-driven refresh share a single fetch round instead of racing. 
+ private inflightRefresh: Promise | null = null; + + constructor() { + super(); + // Multiple renderers (main window + any tearoff) can each subscribe to + // the emitter via tRPC; bump the default cap so dev HMR and StrictMode + // remounts don't trip the listener-warning heuristic. + this.setMaxListeners(20); + for (const def of SERVICE_STATUS_DEFINITIONS) { + this.snapshots.set(def.id, createUnknownSnapshot(def)); + } + } + + start(): void { + if (this.started) return; + this.started = true; + void this.refreshAll(); + this.pollTimer = setInterval(() => { + void this.refreshAll(); + }, POLL_INTERVAL_MS); + // Don't keep the event loop alive just for status polling. + this.pollTimer.unref(); + } + + stop(): void { + if (this.pollTimer) clearInterval(this.pollTimer); + this.pollTimer = null; + this.started = false; + } + + getAll(): ServiceStatusSnapshot[] { + return SERVICE_STATUS_DEFINITIONS.map( + (def) => this.snapshots.get(def.id) ?? createUnknownSnapshot(def), + ); + } + + /** + * Refresh only when the last refresh is older than the given threshold. + * Used for focus-driven refreshes so rapid window switches don't produce + * a fetch storm. + */ + refreshIfStale(thresholdMs = FOCUS_REFRESH_MIN_INTERVAL_MS): void { + if (Date.now() - this.lastRefreshAt < thresholdMs) return; + void this.refreshAll(); + } + + refreshAll(): Promise { + // Collapse concurrent callers onto the same fetch round. The initial + // start() refresh is async and can overlap with a focus-driven + // refreshIfStale() that passes the 30-second check because + // lastRefreshAt is still 0 — without this guard we'd fire the full + // fetch twice on every cold start. 
+ if (this.inflightRefresh) return this.inflightRefresh; + this.inflightRefresh = this.doRefreshAll().finally(() => { + this.inflightRefresh = null; + }); + return this.inflightRefresh; + } + + private async doRefreshAll(): Promise { + // Skip fetching when offline, but still push an "offline" snapshot so + // the UI doesn't keep rendering a stale green dot from the last + // successful poll. net.isOnline() reflects Chromium's connectivity + // state — accurate enough to avoid guaranteed-failure polls on + // planes / disconnected laptops while still running when the OS is + // on a captive-portal / proxy. + if (!net.isOnline()) { + this.markAllOffline(); + return; + } + const results = await Promise.all( + SERVICE_STATUS_DEFINITIONS.map((def) => this.refreshOne(def)), + ); + // Only record a "successful refresh" when at least one fetch actually + // worked, so a transient failure doesn't lock the 30-second debounce + // window in refreshIfStale() and prevent a quick recovery. + if (results.some(Boolean)) { + this.lastRefreshAt = Date.now(); + } + } + + private async refreshOne(def: ServiceStatusDefinition): Promise { + try { + const json = await this.fetchJson(def.apiUrl); + const indicator = json.status?.indicator ?? null; + const description = + json.status?.description || + (indicator === "none" ? "全システム正常" : "ステータス不明"); + this.updateSnapshot({ + id: def.id, + label: def.label, + statusUrl: def.statusUrl, + level: indicatorToLevel(indicator), + indicator, + description, + checkedAt: Date.now(), + fetchError: null, + }); + return true; + } catch (error) { + const message = + error instanceof Error ? error.message : String(error ?? 
"unknown"); + this.updateSnapshot({ + id: def.id, + label: def.label, + statusUrl: def.statusUrl, + level: "unknown", + indicator: null, + description: "ステータスを取得できませんでした", + checkedAt: Date.now(), + fetchError: message, + }); + return false; + } + } + + private markAllOffline(): void { + for (const def of SERVICE_STATUS_DEFINITIONS) { + this.updateSnapshot({ + id: def.id, + label: def.label, + statusUrl: def.statusUrl, + level: "unknown", + indicator: null, + description: "Offline", + // Leave fetchError null so the tooltip just shows "Offline" + // instead of the redundant "… · offline" suffix. + checkedAt: Date.now(), + fetchError: null, + }); + } + } + + private updateSnapshot(next: ServiceStatusSnapshot): void { + this.snapshots.set(next.id, next); + // Always emit so renderers receive the latest checkedAt. The tooltip + // renders "N分前に確認" from snapshot.checkedAt against Date.now(); if + // we skip emit when level/description are unchanged, the renderer's + // checkedAt stays pinned to the first snapshot it received and the + // label drifts (e.g. "45分前") while polling keeps running every 5 + // minutes. + this.emit("change", next); + } + + // Use Electron's net module so fetch uses Chromium's network stack and + // bypasses renderer-side CORS / proxy quirks. 
+ private fetchJson(url: string): Promise { + return new Promise((resolve, reject) => { + const request = net.request({ + method: "GET", + url, + redirect: "follow", + }); + let timedOut = false; + const timeout = setTimeout(() => { + timedOut = true; + request.abort(); + reject(new Error(`Request timed out after ${REQUEST_TIMEOUT_MS}ms`)); + }, REQUEST_TIMEOUT_MS); + + request.on("response", (response) => { + const chunks: Buffer[] = []; + response.on("data", (chunk: Buffer) => { + chunks.push(chunk); + }); + response.on("end", () => { + clearTimeout(timeout); + if (timedOut) return; + if (response.statusCode < 200 || response.statusCode >= 300) { + reject(new Error(`HTTP ${response.statusCode}`)); + return; + } + try { + const body = Buffer.concat(chunks).toString("utf-8"); + resolve(JSON.parse(body) as StatuspageResponse); + } catch (parseError) { + reject(parseError); + } + }); + response.on("error", (err: Error) => { + clearTimeout(timeout); + if (timedOut) return; + reject(err); + }); + }); + request.on("error", (err) => { + clearTimeout(timeout); + if (timedOut) return; + reject(err); + }); + request.end(); + }); + } +} + +export const serviceStatusService = new ServiceStatusService(); + +let pollingWired = false; + +export function setupServiceStatusPolling(): void { + // Guard against duplicate wiring on HMR / re-init — the inner `start()` + // is already idempotent via its `started` flag, but `app.on(...)` would + // otherwise accumulate focus listeners across reloads. + if (pollingWired) return; + pollingWired = true; + serviceStatusService.start(); + const onFocus = () => { + // Debounced refresh — protects the poller from rapid window switches. 
+ serviceStatusService.refreshIfStale(); + }; + app.on("browser-window-focus", onFocus); + app.on("before-quit", () => { + app.off("browser-window-focus", onFocus); + serviceStatusService.stop(); + }); +} diff --git a/apps/desktop/src/main/lib/shell-history.ts b/apps/desktop/src/main/lib/shell-history.ts new file mode 100644 index 00000000000..b547c731c13 --- /dev/null +++ b/apps/desktop/src/main/lib/shell-history.ts @@ -0,0 +1,261 @@ +import { constants } from "node:fs"; +import { + access, + chmod, + readFile, + rename, + stat, + writeFile, +} from "node:fs/promises"; +import { homedir } from "node:os"; +import { dirname, join } from "node:path"; + +export interface ShellHistoryEntry { + command: string; + lastRunAt: number | null; +} + +let cachedHistory: ShellHistoryEntry[] | null = null; +let lastReadTime = 0; +const CACHE_TTL_MS = 30_000; + +const META_MARKER = 0x83; + +function decodeMetafied(buffer: Buffer): string { + const decoded: number[] = []; + for (let i = 0; i < buffer.length; i++) { + if (buffer[i] === META_MARKER && i + 1 < buffer.length) { + decoded.push(buffer[i + 1] ^ 0x20); + i++; + } else { + decoded.push(buffer[i]); + } + } + return Buffer.from(decoded).toString("utf-8"); +} + +function parseZshHistory(content: string): ShellHistoryEntry[] { + const entries: ShellHistoryEntry[] = []; + for (const line of content.split("\n")) { + if (!line.trim()) continue; + // Extended format: : timestamp:0;command + const match = line.match(/^:\s*(\d+):\d+;(.+)$/); + const command = match ? match[2] : line; + const timestamp = match ? 
Number.parseInt(match[1], 10) * 1000 : null;
    // Skip multi-line continuations.
    // NOTE(review): only lines that themselves end with "\" are skipped;
    // the final physical line of a multi-line entry has no trailing "\"
    // and leaks through as a bogus standalone entry. Fixing this needs
    // cross-iteration state in this loop — confirm against the zsh
    // HISTFILE backslash-continuation format before changing behavior.
    if (command.endsWith("\\")) continue;
    const trimmed = command.trim();
    if (trimmed) {
      entries.push({ command: trimmed, lastRunAt: timestamp });
    }
  }
  return entries;
}

/**
 * Parse plain bash history: one command per line, no timestamps.
 * Lines starting with "#" (HISTTIMEFORMAT markers / comments) are dropped.
 */
function parseBashHistory(content: string): ShellHistoryEntry[] {
  return content
    .split("\n")
    .filter((line) => line.trim() && !line.startsWith("#"))
    .map((line) => ({
      command: line.trim(),
      lastRunAt: null,
    }));
}

/**
 * Read the user's shell history, preferring zsh and falling back to bash.
 * Returns [] when neither history file is readable.
 */
async function readHistoryFile(): Promise<ShellHistoryEntry[]> {
  const home = homedir();

  // Try zsh first (more common on macOS)
  const zshPath = `${home}/.zsh_history`;
  try {
    await access(zshPath, constants.R_OK);
    const buffer = await readFile(zshPath);
    // zsh "metafies" high-bit bytes; only decode when the marker appears.
    const content = buffer.includes(META_MARKER)
      ? decodeMetafied(buffer)
      : buffer.toString("utf-8");
    return parseZshHistory(content);
  } catch {
    // zsh history not available
  }

  // Fall back to bash
  const bashPath = `${home}/.bash_history`;
  try {
    await access(bashPath, constants.R_OK);
    const content = await readFile(bashPath, "utf-8");
    return parseBashHistory(content);
  } catch {
    // bash history not available
  }

  return [];
}

/**
 * Cached, deduplicated history (newest occurrence of each command first),
 * re-read from disk at most once per CACHE_TTL_MS.
 */
async function getHistory(): Promise<ShellHistoryEntry[]> {
  const now = Date.now();
  if (cachedHistory && now - lastReadTime < CACHE_TTL_MS) {
    return cachedHistory;
  }

  const entries = await readHistoryFile();

  // Deduplicate, most-recent-first: walk the parsed file from the end so
  // the newest occurrence of a repeated command wins.
  const seen = new Set<string>();
  const result: ShellHistoryEntry[] = [];
  for (let i = entries.length - 1; i >= 0; i--) {
    const entry = entries[i];
    if (!seen.has(entry.command)) {
      seen.add(entry.command);
      result.push(entry);
    }
  }

  // Cap the cache so a pathological history file cannot pin huge memory.
  cachedHistory = result.slice(0, 10_000);
  lastReadTime = now;
  return cachedHistory;
}

const PAGE_SIZE = 8;

/**
 * Prefix-matched suggestions from history, paged PAGE_SIZE at a time.
 * An exact match of the prefix itself is excluded.
 */
export async function getSuggestions(
  prefix: string,
  offset = 0,
): Promise<ShellHistoryEntry[]> {
  const history = await getHistory();
  const results: ShellHistoryEntry[] = [];
  let skipped = 0;

  for (const entry of history) {
    if (entry.command.startsWith(prefix) && entry.command !== prefix) {
      if (skipped < offset) {
        skipped++;
        continue;
      }
      results.push(entry);
      if (results.length >= PAGE_SIZE) break;
    }
  }

  return results;
}

/**
 * Inverse of decodeMetafied: re-apply zsh's "metafied" encoding, writing
 * any byte with the high bit set as META_MARKER followed by (byte ^ 0x20).
 */
function encodeMetafied(text: string): Buffer {
  const src = Buffer.from(text, "utf-8");
  const out: number[] = [];
  for (let i = 0; i < src.length; i++) {
    const b = src[i];
    // zsh encodes any byte with high bit set (>= 0x80)
    if (b >= 0x80) {
      out.push(META_MARKER, b ^ 0x20);
    } else {
      out.push(b);
    }
  }
  return Buffer.from(out);
}

/**
 * Remove every zsh history entry whose (continuation-joined) command
 * equals commandToDelete; all other lines are kept verbatim.
 */
function filterZshLines(lines: string[], commandToDelete: string): string[] {
  const filtered: string[] = [];
  let i = 0;
  while (i < lines.length) {
    const line = lines[i];
    const match = line.match(/^:\s*\d+:\d+;(.+)$/);
    const cmd = match ? match[1] : null;

    if (cmd !== null) {
      // Collect continuation lines (ending with \) into one full command.
      let fullCmd = cmd;
      let blockLen = 1;
      while (fullCmd.endsWith("\\") && i + blockLen < lines.length) {
        fullCmd = fullCmd.slice(0, -1) + lines[i + blockLen];
        blockLen++;
      }
      if (fullCmd.trim() === commandToDelete.trim()) {
        i += blockLen; // drop the whole multi-line block
        continue;
      }
    }
    filtered.push(line);
    i++;
  }
  return filtered;
}

/** Remove every bash history line equal (after trim) to commandToDelete. */
function filterBashLines(lines: string[], commandToDelete: string): string[] {
  return lines.filter((line) => line.trim() !== commandToDelete.trim());
}

/**
 * Replace filePath atomically: write a same-directory temp file, mirror
 * the original file's mode, then rename() over the target so concurrent
 * readers never observe a half-written history file.
 *
 * Fix: the temp file is now removed when chmod/rename fails instead of
 * being leaked in the user's home directory.
 */
async function atomicWriteFile(
  filePath: string,
  content: Buffer,
): Promise<void> {
  const tmp = join(
    dirname(filePath),
    `.superset-hist-${Date.now()}-${Math.random().toString(36).slice(2)}`,
  );
  await writeFile(tmp, content, { mode: 0o600 });
  try {
    try {
      const orig = await stat(filePath);
      await chmod(tmp, orig.mode);
    } catch {
      // target missing / unstat-able — keep the restrictive default 0o600
    }
    await rename(tmp, filePath);
  } catch (error) {
    // Best-effort cleanup so a failed rename doesn't leak the temp file.
    // (unlink is not in this module's static import list; load it lazily
    // to keep this fix self-contained.)
    try {
      const { unlink } = await import("node:fs/promises");
      await unlink(tmp);
    } catch {
      // ignore — cleanup is best effort
    }
    throw error;
  }
}

/**
 * Delete `command` from the user's shell history (zsh first, then bash),
 * rewriting the file atomically and invalidating the in-memory cache.
 */
export async function deleteHistoryEntry(command: string): Promise<void> {
  const home = homedir();

  // Try zsh first
  const zshPath = `${home}/.zsh_history`;
  try {
await access(zshPath, constants.R_OK | constants.W_OK); + const buffer = await readFile(zshPath); + const isMetafiedFile = buffer.includes(META_MARKER); + const content = isMetafiedFile + ? decodeMetafied(buffer) + : buffer.toString("utf-8"); + + const lines = content.split("\n"); + const filtered = filterZshLines(lines, command); + if (filtered.length === lines.length) { + // Nothing deleted + cachedHistory = null; + return; + } + + const newContent = filtered.join("\n"); + const newBuffer = isMetafiedFile + ? encodeMetafied(newContent) + : Buffer.from(newContent, "utf-8"); + await atomicWriteFile(zshPath, newBuffer); + cachedHistory = null; + return; + } catch (err) { + const code = (err as NodeJS.ErrnoException).code; + if (code !== "ENOENT" && code !== "EACCES") { + console.warn("[shell-history] Failed to delete from zsh history:", err); + } + } + + // Fall back to bash + const bashPath = `${home}/.bash_history`; + try { + await access(bashPath, constants.R_OK | constants.W_OK); + const content = await readFile(bashPath, "utf-8"); + const lines = content.split("\n"); + const filtered = filterBashLines(lines, command); + if (filtered.length < lines.length) { + await atomicWriteFile( + bashPath, + Buffer.from(filtered.join("\n"), "utf-8"), + ); + } + cachedHistory = null; + } catch (err) { + const code = (err as NodeJS.ErrnoException).code; + if (code !== "ENOENT" && code !== "EACCES") { + console.warn("[shell-history] Failed to delete from bash history:", err); + } + } +} diff --git a/apps/desktop/src/main/lib/temp-audio-protocol.ts b/apps/desktop/src/main/lib/temp-audio-protocol.ts new file mode 100644 index 00000000000..2a6189b2ff8 --- /dev/null +++ b/apps/desktop/src/main/lib/temp-audio-protocol.ts @@ -0,0 +1,31 @@ +import { createFileProtocolResponse, getMediaMimeType } from "./file-streaming"; + +const registry = new Map(); + +export function registerTempAudio(id: string, filePath: string): void { + registry.set(id, filePath); +} + +export function 
unregisterTempAudio(id: string): void { + registry.delete(id); +} + +export function getTempAudioPath(id: string): string | null { + return registry.get(id) ?? null; +} + +export function createTempAudioProtocolHandler() { + return async (request: Request): Promise => { + const url = new URL(request.url); + const id = url.hostname; + const filePath = registry.get(id); + if (!filePath) { + return new Response("Not found", { status: 404 }); + } + + return createFileProtocolResponse(request, filePath, { + contentType: getMediaMimeType(filePath) ?? "audio/mpeg", + cacheControl: "no-store", + }); + }; +} diff --git a/apps/desktop/src/main/lib/terminal-host/client.ts b/apps/desktop/src/main/lib/terminal-host/client.ts index 7c335cf516d..334b77f05c4 100644 --- a/apps/desktop/src/main/lib/terminal-host/client.ts +++ b/apps/desktop/src/main/lib/terminal-host/client.ts @@ -1308,6 +1308,13 @@ export class TerminalHostClient extends EventEmitter { }); } + private isCreateOrAttachTimeoutError(error: unknown): boolean { + return ( + error instanceof Error && + error.message === "Request timeout: createOrAttach" + ); + } + /** * Send a notification (no pending request / no timeout). 
* @@ -1374,19 +1381,10 @@ export class TerminalHostClient extends EventEmitter { return `${sessionId}:${requestId}`; } - // =========================================================================== - // Public API - // =========================================================================== - - /** - * Create or attach to a terminal session - */ - async createOrAttach( + private throwIfCreateOrAttachCanceled( request: CreateOrAttachRequest, signal?: AbortSignal, - ): Promise { - throwIfAborted(signal); - await this.ensureConnected(); + ): void { throwIfAborted(signal); if ( request.requestId && @@ -1399,10 +1397,40 @@ export class TerminalHostClient extends EventEmitter { ) { throw new TerminalAttachCanceledError(); } - const response = await this.sendRequest( - "createOrAttach", - request, - ); + } + + // =========================================================================== + // Public API + // =========================================================================== + + /** + * Create or attach to a terminal session + */ + async createOrAttach( + request: CreateOrAttachRequest, + signal?: AbortSignal, + ): Promise { + this.throwIfCreateOrAttachCanceled(request, signal); + await this.ensureConnected(); + this.throwIfCreateOrAttachCanceled(request, signal); + let response: CreateOrAttachResponse; + try { + response = await this.sendRequest( + "createOrAttach", + request, + ); + } catch (error) { + if (!this.isCreateOrAttachTimeoutError(error)) { + throw error; + } + this.resetConnectionState({ emitDisconnected: false }); + await this.ensureConnected(); + this.throwIfCreateOrAttachCanceled(request, signal); + response = await this.sendRequest( + "createOrAttach", + request, + ); + } // Version skew: older daemons may not return pid - normalize undefined → null return { ...response, pid: response.pid ?? 
null }; } diff --git a/apps/desktop/src/main/lib/terminal-host/headless-emulator.ts b/apps/desktop/src/main/lib/terminal-host/headless-emulator.ts index 4b3cf7656d3..035f8dd3ec7 100644 --- a/apps/desktop/src/main/lib/terminal-host/headless-emulator.ts +++ b/apps/desktop/src/main/lib/terminal-host/headless-emulator.ts @@ -124,14 +124,18 @@ export class HeadlessEmulator { * Data is buffered and will be processed asynchronously. * Use writeSync() if you need to wait for the write to complete. */ - write(data: string): void { + write(data: string, onProcessed?: () => void): void { if (this.disposed) return; if (!DEBUG_EMULATOR_TIMING) { // Parse escape sequences with chunk-safe buffering this.parseEscapeSequences(data); // Write to headless terminal (buffered/async) - this.terminal.write(data); + if (onProcessed) { + this.terminal.write(data, onProcessed); + } else { + this.terminal.write(data); + } return; } @@ -140,7 +144,11 @@ export class HeadlessEmulator { const parseTime = performance.now() - parseStart; const terminalStart = performance.now(); - this.terminal.write(data); + if (onProcessed) { + this.terminal.write(data, onProcessed); + } else { + this.terminal.write(data); + } const terminalTime = performance.now() - terminalStart; if (parseTime > 2 || terminalTime > 2) { diff --git a/apps/desktop/src/main/lib/terminal-host/types.ts b/apps/desktop/src/main/lib/terminal-host/types.ts index d957d434527..36ec8a568fc 100644 --- a/apps/desktop/src/main/lib/terminal-host/types.ts +++ b/apps/desktop/src/main/lib/terminal-host/types.ts @@ -190,6 +190,8 @@ export interface CancelCreateOrAttachRequest { export interface WriteRequest { sessionId: string; data: string; + /** True when the write originates from direct user keyboard input. 
*/ + interactive?: boolean; } /** diff --git a/apps/desktop/src/main/lib/terminal/daemon/constants.ts b/apps/desktop/src/main/lib/terminal/daemon/constants.ts index 2d94871072f..728e53ab2f3 100644 --- a/apps/desktop/src/main/lib/terminal/daemon/constants.ts +++ b/apps/desktop/src/main/lib/terminal/daemon/constants.ts @@ -1,6 +1,6 @@ export const SESSION_CLEANUP_DELAY_MS = 5000; export const DEBUG_TERMINAL = process.env.SUPERSET_TERMINAL_DEBUG === "1"; -export const CREATE_OR_ATTACH_CONCURRENCY = 3; +export const CREATE_OR_ATTACH_CONCURRENCY = 8; export const MAX_SCROLLBACK_BYTES = 500_000; export const MAX_HISTORY_SCROLLBACK_BYTES = 512 * 1024; export const MAX_KILLED_SESSION_TOMBSTONES = 1000; diff --git a/apps/desktop/src/main/lib/terminal/daemon/daemon-manager.test.ts b/apps/desktop/src/main/lib/terminal/daemon/daemon-manager.test.ts index d1a7fb8b5eb..08f216e9d4a 100644 --- a/apps/desktop/src/main/lib/terminal/daemon/daemon-manager.test.ts +++ b/apps/desktop/src/main/lib/terminal/daemon/daemon-manager.test.ts @@ -176,10 +176,13 @@ mock.module("main/lib/analytics", () => ({ })); mock.module("../env", () => ({ - buildTerminalEnv: () => ({}), getDefaultShell: () => "/bin/zsh", })); +mock.module("../env-terminal", () => ({ + buildTerminalEnv: () => ({}), +})); + mock.module("main/lib/app-state", () => ({ appState: { data: null }, })); diff --git a/apps/desktop/src/main/lib/terminal/env-terminal.ts b/apps/desktop/src/main/lib/terminal/env-terminal.ts new file mode 100644 index 00000000000..e7641277879 --- /dev/null +++ b/apps/desktop/src/main/lib/terminal/env-terminal.ts @@ -0,0 +1,143 @@ +import fs from "node:fs"; +import os from "node:os"; +import { settings } from "@superset/local-db"; +import { DEFAULT_PREVENT_AGENT_SLEEP } from "shared/constants"; +import { env } from "shared/env.shared"; +import { getShellEnv } from "../agent-setup/shell-wrappers"; +import { localDb } from "../local-db"; +import { + buildSafeEnv, + getLocale, + HOOK_PROTOCOL_VERSION, + 
sanitizeEnv, +} from "./env"; + +const MACOS_SYSTEM_CERT_FILE = "/etc/ssl/cert.pem"; +const PROCESS_ENV_SNAPSHOT_CACHE_TTL_MS = 1_000; + +let cachedProcessEnvSnapshot: { + raw: Record; + safe: Record; + expiresAt: number; +} | null = null; +let cachedMacosSystemCertAvailable: boolean | null = null; + +function getProcessEnvSnapshot(): { + raw: Record; + safe: Record; +} { + const now = Date.now(); + if (cachedProcessEnvSnapshot && cachedProcessEnvSnapshot.expiresAt > now) { + return cachedProcessEnvSnapshot; + } + + const raw = sanitizeEnv(process.env) || {}; + const safe = buildSafeEnv(raw); + cachedProcessEnvSnapshot = { + raw, + safe, + expiresAt: now + PROCESS_ENV_SNAPSHOT_CACHE_TTL_MS, + }; + return cachedProcessEnvSnapshot; +} + +function hasMacosSystemCertBundle(): boolean { + if (cachedMacosSystemCertAvailable !== null) { + return cachedMacosSystemCertAvailable; + } + + cachedMacosSystemCertAvailable = fs.existsSync(MACOS_SYSTEM_CERT_FILE); + return cachedMacosSystemCertAvailable; +} + +export function resetTerminalEnvCachesForTests(): void { + cachedProcessEnvSnapshot = null; + cachedMacosSystemCertAvailable = null; +} + +/** + * @deprecated Use buildSafeEnv instead. Kept for backward compatibility. + */ +export function removeAppEnvVars( + env: Record, +): Record { + return buildSafeEnv(env); +} + +export function buildTerminalEnv(params: { + shell: string; + paneId: string; + tabId: string; + workspaceId: string; + workspaceName?: string; + workspacePath?: string; + rootPath?: string; + themeType?: "dark" | "light"; +}): Record { + const { + shell, + paneId, + tabId, + workspaceId, + workspaceName, + workspacePath, + rootPath, + themeType, + } = params; + + // Get Electron's process.env and filter to only allowlisted safe vars + // This prevents secrets and app config from leaking to user terminals + const { raw: rawBaseEnv, safe: baseEnv } = getProcessEnvSnapshot(); + + // shellEnv provides shell wrapper control variables (ZDOTDIR, BASH_ENV, etc.) 
+ // These configure how the shell initializes, not the user's actual environment + const shellEnv = getShellEnv(shell); + const locale = getLocale(rawBaseEnv); + + // COLORFGBG: "foreground;background" ANSI color indices — TUI apps use this to detect light/dark + const colorFgBg = themeType === "light" ? "0;15" : "15;0"; + const preventAgentSleepSetting = + localDb.select().from(settings).get()?.preventAgentSleep ?? + DEFAULT_PREVENT_AGENT_SLEEP; + + const terminalEnv: Record = { + ...baseEnv, + ...shellEnv, + TERM_PROGRAM: "Superset", + TERM_PROGRAM_VERSION: process.env.npm_package_version || "1.0.0", + COLORTERM: "truecolor", + COLORFGBG: colorFgBg, + LANG: locale, + // Browser-MCP bridge discovery: propagate the resolved Superset home + // dir so MCP servers spawned by claude/codex in this terminal read + // the correct workspace-scoped browser-mcp.json. + SUPERSET_HOME_DIR: + process.env.SUPERSET_HOME_DIR ?? shellEnv.SUPERSET_HOME_DIR ?? "", + SUPERSET_PANE_ID: paneId, + SUPERSET_TAB_ID: tabId, + SUPERSET_WORKSPACE_ID: workspaceId, + SUPERSET_WORKSPACE_NAME: workspaceName || "", + SUPERSET_WORKSPACE_PATH: workspacePath || "", + SUPERSET_ROOT_PATH: rootPath || "", + SUPERSET_PORT: String(env.DESKTOP_NOTIFICATIONS_PORT), + // Environment identifier for dev/prod separation + SUPERSET_ENV: env.NODE_ENV === "development" ? "development" : "production", + // Hook protocol version for forward compatibility + SUPERSET_HOOK_VERSION: HOOK_PROTOCOL_VERSION, + SUPERSET_PREVENT_AGENT_SLEEP: preventAgentSleepSetting ? "1" : "0", + }; + + delete terminalEnv.GOOGLE_API_KEY; + + // Electron child processes can't access macOS Keychain for TLS cert verification, + // causing "x509: OSStatus -26276" in Go binaries like `gh`. File-based fallback. 
+ if ( + os.platform() === "darwin" && + !terminalEnv.SSL_CERT_FILE && + hasMacosSystemCertBundle() + ) { + terminalEnv.SSL_CERT_FILE = MACOS_SYSTEM_CERT_FILE; + } + + return terminalEnv; +} diff --git a/apps/desktop/src/main/lib/terminal/env.test.ts b/apps/desktop/src/main/lib/terminal/env.test.ts index a0c661abe2c..09854f97fce 100644 --- a/apps/desktop/src/main/lib/terminal/env.test.ts +++ b/apps/desktop/src/main/lib/terminal/env.test.ts @@ -1,23 +1,28 @@ import { afterEach, beforeEach, describe, expect, it } from "bun:test"; import { buildSafeEnv, - buildTerminalEnv, FALLBACK_SHELL, getLocale, normalizeDefaultShell, - removeAppEnvVars, resetTerminalEnvCachesForTests, SHELL_CRASH_THRESHOLD_MS, sanitizeEnv, } from "./env"; +import { + buildTerminalEnv, + removeAppEnvVars, + resetTerminalEnvCachesForTests as resetEnvTerminalCaches, +} from "./env-terminal"; describe("env", () => { beforeEach(() => { resetTerminalEnvCachesForTests(); + resetEnvTerminalCaches(); }); afterEach(() => { resetTerminalEnvCachesForTests(); + resetEnvTerminalCaches(); }); describe("constants", () => { diff --git a/apps/desktop/src/main/lib/terminal/port-scanner.ts b/apps/desktop/src/main/lib/terminal/port-scanner.ts index 748cf187961..1b6089de6ad 100644 --- a/apps/desktop/src/main/lib/terminal/port-scanner.ts +++ b/apps/desktop/src/main/lib/terminal/port-scanner.ts @@ -272,3 +272,80 @@ async function getProcessNameWindows( } return "unknown"; } + +export async function getProcessName(pid: number): Promise { + // FORK NOTE: Windows は `ps` が無いので ENOENT で常に fallback してしまう。 + // プラットフォーム分岐で既存の Windows ヘルパーへ委譲する。 + if (os.platform() === "win32") { + return getProcessNameWindows(pid); + } + try { + const { stdout: output } = await execFileAsync( + "ps", + ["-p", String(pid), "-o", "comm="], + { + timeout: EXEC_TIMEOUT_MS, + }, + ); + return output.trim() || "unknown"; + } catch { + return "unknown"; + } +} + +/** + * Get the full command line for a PID on Windows. 
+ * Mirrors getProcessNameWindows but returns the full command line + * (wmic `commandline` column, PowerShell `CommandLine` property) so + * pane-resolver / browser-automation can match terminal processes + * cross-platform. + */ +async function getProcessCommandWindows( + pid: number, + signal?: AbortSignal, +): Promise { + try { + const { stdout } = await execFileAsync( + "wmic", + ["process", "where", `processid=${pid}`, "get", "commandline"], + { timeout: EXEC_TIMEOUT_MS, signal }, + ); + const lines = stdout.trim().split("\n"); + if (lines.length >= 2) { + return lines.slice(1).join("\n").trim(); + } + } catch { + // wmic is deprecated, try PowerShell as fallback + try { + const { stdout } = await execFileAsync( + "powershell", + [ + "-Command", + `(Get-CimInstance Win32_Process -Filter "ProcessId=${pid}").CommandLine`, + ], + { timeout: EXEC_TIMEOUT_MS, signal }, + ); + return stdout.trim(); + } catch {} + } + return ""; +} + +export async function getProcessCommand(pid: number): Promise { + // FORK NOTE: Windows 分岐 — 上記 getProcessName と同じ理由。 + if (os.platform() === "win32") { + return getProcessCommandWindows(pid); + } + try { + const { stdout } = await execFileAsync( + "ps", + ["-p", String(pid), "-o", "args="], + { + timeout: EXEC_TIMEOUT_MS, + }, + ); + return stdout.trim(); + } catch { + return ""; + } +} diff --git a/apps/desktop/src/main/lib/terminal/session.ts b/apps/desktop/src/main/lib/terminal/session.ts index e56be603369..c9d8886508e 100644 --- a/apps/desktop/src/main/lib/terminal/session.ts +++ b/apps/desktop/src/main/lib/terminal/session.ts @@ -10,7 +10,8 @@ import { containsClearScrollbackSequence, extractContentAfterClear, } from "../terminal-escape-filter"; -import { buildTerminalEnv, FALLBACK_SHELL, getDefaultShell } from "./env"; +import { FALLBACK_SHELL, getDefaultShell } from "./env"; +import { buildTerminalEnv } from "./env-terminal"; import { PtyWriteQueue } from "./pty-write-queue"; import type { InternalCreateSessionParams, 
TerminalSession } from "./types"; diff --git a/apps/desktop/src/main/lib/todo-daemon/client.ts b/apps/desktop/src/main/lib/todo-daemon/client.ts new file mode 100644 index 00000000000..bf0e9889fcd --- /dev/null +++ b/apps/desktop/src/main/lib/todo-daemon/client.ts @@ -0,0 +1,801 @@ +/** + * TODO Agent Daemon Client + * + * Client library for the Electron main process to communicate with + * the todo-agent daemon. Mirrors the pattern used by terminal-host/client.ts + * but scoped to the smaller TODO-agent protocol. + * + * The daemon owns `claude -p` child processes so TODO sessions survive + * app restarts — see issue #237. + */ + +import { spawn } from "node:child_process"; +import { randomBytes, randomUUID } from "node:crypto"; +import { EventEmitter } from "node:events"; +import { + chmodSync, + closeSync, + existsSync, + mkdirSync, + openSync, + readFileSync, + statSync, + unlinkSync, + writeFileSync, +} from "node:fs"; +import { connect, type Socket } from "node:net"; +import { homedir } from "node:os"; +import { join } from "node:path"; +import { app } from "electron"; +import { todoAgentMainDebug } from "main/todo-agent/debug"; +import { SUPERSET_DIR_NAME } from "shared/constants"; +import { + type AbortRequest, + type EmptyResponse, + type HelloResponse, + type IpcEvent, + type IpcResponse, + type ListActiveResponse, + type QueueInterventionRequest, + type ResumeWaitingRequest, + type SessionStateEventPayload, + type SessionStreamEventPayload, + type ShutdownRequest, + type StartRequest, + TODO_DAEMON_PROTOCOL_VERSION, +} from "./types"; + +const DEBUG = process.env.SUPERSET_TODO_DAEMON_DEBUG === "1"; + +const SUPERSET_HOME_DIR = join(homedir(), SUPERSET_DIR_NAME); +const SOCKET_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.sock"); +const TOKEN_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.token"); +const PID_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.pid"); +const SPAWN_LOCK_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.spawn.lock"); +const SCRIPT_MTIME_PATH = 
join(SUPERSET_HOME_DIR, "todo-daemon.mtime"); + +const CONNECT_TIMEOUT_MS = 5_000; +const SPAWN_WAIT_MS = 3_000; +const REQUEST_TIMEOUT_MS = 30_000; +const SPAWN_LOCK_TIMEOUT_MS = 10_000; +const MAX_DAEMON_LOG_BYTES = 5 * 1024 * 1024; + +function log(level: "info" | "warn" | "error", message: string): void { + if (!DEBUG && level === "info") return; + const prefix = `[todo-daemon-client]`; + if (level === "error") { + console.error(`${prefix} ${message}`); + } else if (level === "warn") { + console.warn(`${prefix} ${message}`); + } else { + console.log(`${prefix} ${message}`); + } +} + +class NdjsonParser { + private remainder = ""; + + parse(chunk: string): Array { + const messages: Array = []; + const data = this.remainder + chunk; + this.remainder = ""; + + let startIndex = 0; + let newlineIndex = data.indexOf("\n"); + while (newlineIndex !== -1) { + const line = data.slice(startIndex, newlineIndex); + if (line.trim()) { + try { + messages.push(JSON.parse(line)); + } catch { + log("warn", "Failed to parse NDJSON line"); + } + } + startIndex = newlineIndex + 1; + newlineIndex = data.indexOf("\n", startIndex); + } + if (startIndex < data.length) { + this.remainder = data.slice(startIndex); + } + return messages; + } +} + +interface PendingRequest { + resolve: (value: unknown) => void; + reject: (error: Error) => void; + timeoutId: NodeJS.Timeout; +} + +export interface TodoDaemonClientEvents { + sessionState: (payload: SessionStateEventPayload) => void; + streamEvents: (payload: SessionStreamEventPayload) => void; + connected: () => void; + disconnected: () => void; + error: (error: Error) => void; +} + +enum ConnectionState { + DISCONNECTED = "disconnected", + CONNECTING = "connecting", + CONNECTED = "connected", +} + +export class TodoDaemonClient extends EventEmitter { + private socket: Socket | null = null; + private parser = new NdjsonParser(); + private pendingRequests = new Map(); + private requestCounter = 0; + private authenticated = false; + private 
connectionState = ConnectionState.DISCONNECTED; + private disposed = false; + private disconnectArmed = false; + private activeSessionIds: string[] = []; + + async ensureConnected(): Promise { + if ( + this.connectionState === ConnectionState.CONNECTED && + this.socket && + this.authenticated + ) { + return; + } + if (this.connectionState === ConnectionState.CONNECTING) { + return this.waitForConnection(); + } + this.connectionState = ConnectionState.CONNECTING; + this.disconnectArmed = false; + try { + await this.connectAndAuthenticate(); + this.connectionState = ConnectionState.CONNECTED; + this.emit("connected"); + } catch (error) { + this.resetConnectionState({ emitDisconnected: false }); + throw error; + } + } + + /** Sessions the daemon reported as in-flight at last hello. */ + getKnownActiveSessionIds(): readonly string[] { + return this.activeSessionIds; + } + + private async waitForConnection(): Promise { + const start = Date.now(); + while (this.connectionState === ConnectionState.CONNECTING) { + if (Date.now() - start > 10_000) { + throw new Error("Timed out waiting for daemon connection"); + } + await this.sleep(100); + } + if ( + this.connectionState !== ConnectionState.CONNECTED || + !this.authenticated + ) { + throw new Error("Connection attempt failed"); + } + } + + private async connectAndAuthenticate(): Promise { + for (let attempt = 0; attempt < 2; attempt++) { + if ( + attempt === 0 && + process.env.NODE_ENV === "development" && + this.isDaemonScriptStale() + ) { + log("info", "Daemon script rebuilt, restarting..."); + this.killDaemonFromPidFile(); + await this.waitForDaemonShutdown(); + } + + let connected = await this.tryConnect(); + if (!connected) { + await this.spawnDaemon(); + connected = await this.tryConnect(); + if (!connected) { + throw new Error("Failed to connect to daemon after spawn"); + } + } + + const token = this.readAuthToken(); + try { + const response = await this.sendRequest("hello", { + protocolVersion: 
TODO_DAEMON_PROTOCOL_VERSION, + token, + }); + if (response.protocolVersion !== TODO_DAEMON_PROTOCOL_VERSION) { + if (attempt === 0) { + log( + "info", + `Protocol mismatch (client=${TODO_DAEMON_PROTOCOL_VERSION}, daemon=${response.protocolVersion}), restarting daemon`, + ); + this.killDaemonFromPidFile(); + await this.waitForDaemonShutdown(); + this.resetConnectionState({ emitDisconnected: false }); + continue; + } + throw new Error( + `Protocol version mismatch: client=${TODO_DAEMON_PROTOCOL_VERSION}, daemon=${response.protocolVersion}`, + ); + } + this.authenticated = true; + this.activeSessionIds = Array.isArray(response.activeSessionIds) + ? response.activeSessionIds.slice() + : []; + todoAgentMainDebug.info( + "todo-daemon-client-authenticated", + { + protocolVersion: response.protocolVersion, + activeSessionCount: this.activeSessionIds.length, + }, + { + captureMessage: true, + fingerprint: [ + "todo.agent.main", + "todo-daemon-client-authenticated", + ], + }, + ); + return; + } catch (error) { + if (attempt === 0) { + log( + "warn", + `hello failed (${ + error instanceof Error ? 
error.message : String(error) + }), retrying with a fresh daemon`, + ); + this.killDaemonFromPidFile(); + await this.waitForDaemonShutdown(); + this.resetConnectionState({ emitDisconnected: false }); + continue; + } + throw error; + } + } + throw new Error("Exhausted connection retries"); + } + + private async tryConnect(): Promise { + return new Promise((resolve) => { + if (!existsSync(SOCKET_PATH)) { + resolve(false); + return; + } + try { + this.socket?.destroy(); + } catch { + // ignore + } + this.socket = null; + this.authenticated = false; + + const socket = connect(SOCKET_PATH); + let resolved = false; + const timeout = setTimeout(() => { + if (!resolved) { + resolved = true; + socket.destroy(); + resolve(false); + } + }, CONNECT_TIMEOUT_MS); + socket.on("connect", () => { + if (resolved) return; + resolved = true; + clearTimeout(timeout); + socket.setEncoding("utf8"); + socket.unref(); + this.socket = socket; + this.setupSocketHandlers(); + resolve(true); + }); + socket.on("error", () => { + if (resolved) return; + resolved = true; + clearTimeout(timeout); + resolve(false); + }); + }); + } + + private setupSocketHandlers(): void { + const socket = this.socket; + if (!socket) return; + socket.on("data", (data: string) => { + const messages = this.parser.parse(data); + for (const message of messages) { + this.handleMessage(message); + } + }); + socket.on("close", () => { + if (this.socket !== socket) return; + this.handleDisconnect(); + }); + socket.on("error", (error) => { + if (this.socket !== socket) return; + this.emit("error", error); + this.handleDisconnect(); + }); + } + + private handleMessage(message: IpcResponse | IpcEvent): void { + if ("id" in message) { + const pending = this.pendingRequests.get(message.id); + if (!pending) return; + this.pendingRequests.delete(message.id); + clearTimeout(pending.timeoutId); + if (message.ok) { + pending.resolve(message.payload); + } else { + pending.reject( + new Error(`${message.error.code}: 
${message.error.message}`), + ); + } + return; + } + if (message.type === "event") { + switch (message.event) { + case "sessionState": + this.emit( + "sessionState", + message.payload as SessionStateEventPayload, + ); + return; + case "streamEvents": + this.emit( + "streamEvents", + message.payload as SessionStreamEventPayload, + ); + return; + default: + log("warn", `Unknown event: ${message.event}`); + } + } + } + + private handleDisconnect(): void { + if (this.disconnectArmed) return; + this.disconnectArmed = true; + this.resetConnectionState({ emitDisconnected: true }); + } + + private resetConnectionState({ + emitDisconnected, + }: { + emitDisconnected: boolean; + }): void { + try { + this.socket?.destroy(); + } catch { + // ignore + } + this.socket = null; + this.authenticated = false; + this.connectionState = ConnectionState.DISCONNECTED; + this.parser = new NdjsonParser(); + for (const [id, pending] of this.pendingRequests.entries()) { + clearTimeout(pending.timeoutId); + pending.reject(new Error("Connection lost")); + this.pendingRequests.delete(id); + } + if (emitDisconnected) { + this.emit("disconnected"); + } + } + + private readAuthToken(): string { + if (!existsSync(TOKEN_PATH)) { + throw new Error("Auth token not found — daemon may not be running"); + } + return readFileSync(TOKEN_PATH, "utf-8").trim(); + } + + private ensureAuthToken(): string { + if (existsSync(TOKEN_PATH)) { + try { + return readFileSync(TOKEN_PATH, "utf-8").trim(); + } catch { + // fall through and regenerate + } + } + if (!existsSync(SUPERSET_HOME_DIR)) { + mkdirSync(SUPERSET_HOME_DIR, { recursive: true, mode: 0o700 }); + } + const token = randomBytes(32).toString("hex"); + writeFileSync(TOKEN_PATH, token, { mode: 0o600 }); + return token; + } + + private killDaemonFromPidFile(): void { + if (!existsSync(PID_PATH)) return; + try { + const raw = readFileSync(PID_PATH, "utf-8").trim(); + const pid = Number.parseInt(raw, 10); + if (!Number.isNaN(pid)) { + try { + process.kill(pid, 
"SIGTERM"); + } catch { + // stale pid + } + } + } catch { + // best effort + } + } + + private async waitForDaemonShutdown(): Promise { + const start = Date.now(); + while (Date.now() - start < 3_000) { + if (!existsSync(SOCKET_PATH)) return; + await this.sleep(100); + } + } + + private acquireSpawnLock(): boolean { + try { + if (!existsSync(SUPERSET_HOME_DIR)) { + mkdirSync(SUPERSET_HOME_DIR, { recursive: true, mode: 0o700 }); + } + try { + chmodSync(SUPERSET_HOME_DIR, 0o700); + } catch { + // best effort + } + if (existsSync(SPAWN_LOCK_PATH)) { + const lockContent = readFileSync(SPAWN_LOCK_PATH, "utf-8").trim(); + const lockTime = Number.parseInt(lockContent, 10); + if ( + !Number.isNaN(lockTime) && + Date.now() - lockTime < SPAWN_LOCK_TIMEOUT_MS + ) { + return false; + } + unlinkSync(SPAWN_LOCK_PATH); + } + writeFileSync(SPAWN_LOCK_PATH, String(Date.now()), { mode: 0o600 }); + return true; + } catch { + return false; + } + } + + private releaseSpawnLock(): void { + try { + if (existsSync(SPAWN_LOCK_PATH)) unlinkSync(SPAWN_LOCK_PATH); + } catch { + // ignore + } + } + + private isDaemonScriptStale(): boolean { + try { + if (!existsSync(SCRIPT_MTIME_PATH)) return false; + const savedMtime = readFileSync(SCRIPT_MTIME_PATH, "utf-8").trim(); + const scriptPath = this.getDaemonScriptPath(); + if (!existsSync(scriptPath)) return false; + const currentMtime = statSync(scriptPath).mtimeMs.toString(); + return savedMtime !== currentMtime; + } catch { + return false; + } + } + + private saveDaemonScriptMtime(): void { + try { + const scriptPath = this.getDaemonScriptPath(); + if (!existsSync(scriptPath)) return; + const mtime = statSync(scriptPath).mtimeMs.toString(); + writeFileSync(SCRIPT_MTIME_PATH, mtime, { mode: 0o600 }); + } catch { + // best effort + } + } + + private getDaemonScriptPath(): string { + const appPath = app.getAppPath(); + return join(appPath, "dist", "main", "todo-daemon.js"); + } + + private async spawnDaemon(): Promise { + if 
(existsSync(SOCKET_PATH)) { + const live = await this.isSocketLive(); + if (live) { + log("info", "Socket is live, daemon already running"); + return; + } + try { + unlinkSync(SOCKET_PATH); + } catch { + // ignore + } + } + if (existsSync(PID_PATH)) { + try { + unlinkSync(PID_PATH); + } catch { + // ignore + } + } + + if (!this.acquireSpawnLock()) { + log("info", "Another spawn in progress, waiting..."); + await this.waitForDaemon(); + return; + } + try { + this.ensureAuthToken(); + + const daemonScript = this.getDaemonScriptPath(); + if (!existsSync(daemonScript)) { + throw new Error(`Daemon script not found: ${daemonScript}`); + } + + const logPath = join(SUPERSET_HOME_DIR, "todo-daemon.log"); + let logFd: number; + try { + if (existsSync(logPath)) { + try { + const { size } = statSync(logPath); + if (size > MAX_DAEMON_LOG_BYTES) { + writeFileSync(logPath, "", { mode: 0o600 }); + } + } catch { + // best effort + } + } + logFd = openSync(logPath, "a", 0o600); + try { + chmodSync(logPath, 0o600); + } catch { + // best effort + } + } catch (error) { + log("warn", `Failed to open daemon log: ${error}`); + logFd = -1; + } + + let child: ReturnType | null = null; + try { + child = spawn(process.execPath, [daemonScript], { + detached: true, + stdio: logFd >= 0 ? 
["ignore", logFd, logFd] : "ignore", + env: { + ...process.env, + ELECTRON_RUN_AS_NODE: "1", + NODE_ENV: process.env.NODE_ENV, + }, + }); + } finally { + if (logFd >= 0) { + try { + closeSync(logFd); + } catch { + // ignore + } + } + } + + if (!child) { + throw new Error("Failed to spawn daemon"); + } + log("info", `Daemon spawned PID=${child.pid}`); + child.unref(); + + await this.waitForDaemon(); + if (process.env.NODE_ENV === "development") { + this.saveDaemonScriptMtime(); + } + } finally { + this.releaseSpawnLock(); + } + } + + private async waitForDaemon(): Promise { + const start = Date.now(); + while (Date.now() - start < SPAWN_WAIT_MS) { + if (existsSync(SOCKET_PATH)) { + await this.sleep(150); + return; + } + await this.sleep(100); + } + throw new Error("Daemon failed to start in time"); + } + + private isSocketLive(): Promise { + return new Promise((resolve) => { + if (!existsSync(SOCKET_PATH)) { + resolve(false); + return; + } + const testSocket = connect(SOCKET_PATH); + const timeout = setTimeout(() => { + testSocket.destroy(); + resolve(false); + }, 1_000); + testSocket.on("connect", () => { + clearTimeout(timeout); + testSocket.destroy(); + resolve(true); + }); + testSocket.on("error", () => { + clearTimeout(timeout); + resolve(false); + }); + }); + } + + private sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); + } + + private sendRequest(type: string, payload: unknown): Promise { + return new Promise((resolve, reject) => { + if (!this.socket) { + reject(new Error("Not connected")); + return; + } + const id = `req_${++this.requestCounter}_${randomUUID().slice(0, 8)}`; + const timeoutId = setTimeout(() => { + this.pendingRequests.delete(id); + reject(new Error(`Request timeout: ${type}`)); + }, REQUEST_TIMEOUT_MS); + this.pendingRequests.set(id, { + resolve: resolve as (value: unknown) => void, + reject, + timeoutId, + }); + const message = `${JSON.stringify({ id, type, payload })}\n`; + 
this.socket.write(message); + }); + } + + // ========================================================================= + // Public API + // ========================================================================= + + async start(request: StartRequest): Promise { + todoAgentMainDebug.info( + "todo-daemon-client-start-request", + { + sessionId: request.sessionId, + fromScheduledWakeup: request.fromScheduledWakeup ?? false, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-daemon-client-start-request"], + }, + ); + try { + await this.ensureConnected(); + const response = await this.sendRequest("start", request); + todoAgentMainDebug.info( + "todo-daemon-client-start-request-success", + { + sessionId: request.sessionId, + fromScheduledWakeup: request.fromScheduledWakeup ?? false, + }, + { + captureMessage: true, + fingerprint: [ + "todo.agent.main", + "todo-daemon-client-start-request-success", + ], + }, + ); + return response; + } catch (error) { + todoAgentMainDebug.captureException( + error, + "todo-daemon-client-start-request-failed", + { + sessionId: request.sessionId, + fromScheduledWakeup: request.fromScheduledWakeup ?? 
false, + }, + { + fingerprint: [ + "todo.agent.main", + "todo-daemon-client-start-request-failed", + ], + }, + ); + throw error; + } + } + + async abort(request: AbortRequest): Promise { + await this.ensureConnected(); + return this.sendRequest("abort", request); + } + + async queueIntervention( + request: QueueInterventionRequest, + ): Promise { + await this.ensureConnected(); + return this.sendRequest("queueIntervention", request); + } + + async resumeWaiting(request: ResumeWaitingRequest): Promise { + await this.ensureConnected(); + return this.sendRequest("resumeWaiting", request); + } + + async settingsChanged(): Promise { + await this.ensureConnected(); + return this.sendRequest("settingsChanged", {}); + } + + async rehydrate(): Promise { + try { + await this.ensureConnected(); + const response = await this.sendRequest("rehydrate", {}); + todoAgentMainDebug.info( + "todo-daemon-client-rehydrate-success", + { + activeSessionCount: this.activeSessionIds.length, + }, + { + captureMessage: true, + fingerprint: [ + "todo.agent.main", + "todo-daemon-client-rehydrate-success", + ], + }, + ); + return response; + } catch (error) { + todoAgentMainDebug.captureException( + error, + "todo-daemon-client-rehydrate-failed", + undefined, + { + fingerprint: [ + "todo.agent.main", + "todo-daemon-client-rehydrate-failed", + ], + }, + ); + throw error; + } + } + + async listActive(): Promise { + await this.ensureConnected(); + return this.sendRequest("listActive", undefined); + } + + async shutdown(request: ShutdownRequest = {}): Promise { + await this.ensureConnected(); + const response = await this.sendRequest("shutdown", request); + this.disconnect(); + return response; + } + + disconnect(): void { + this.disconnectArmed = true; + this.resetConnectionState({ emitDisconnected: false }); + } + + dispose(): void { + if (this.disposed) return; + this.disposed = true; + this.disconnect(); + this.removeAllListeners(); + } +} + +let clientInstance: TodoDaemonClient | null = null; + 
+export function getTodoDaemonClient(): TodoDaemonClient { + if (!clientInstance) { + clientInstance = new TodoDaemonClient(); + } + return clientInstance; +} + +export function disposeTodoDaemonClient(): void { + if (clientInstance) { + clientInstance.dispose(); + clientInstance = null; + } +} diff --git a/apps/desktop/src/main/lib/todo-daemon/types.ts b/apps/desktop/src/main/lib/todo-daemon/types.ts new file mode 100644 index 00000000000..d05e4320a39 --- /dev/null +++ b/apps/desktop/src/main/lib/todo-daemon/types.ts @@ -0,0 +1,148 @@ +/** + * TODO Agent Daemon Protocol Types + * + * IPC protocol between the Electron main process and the todo-agent daemon. + * Changes must be additive-only for backwards compatibility. + * + * The daemon owns `claude -p` child processes so they survive app + * restarts. Issue #237. + */ + +import type { SelectTodoSession } from "@superset/local-db"; +import type { TodoStreamEvent } from "main/todo-agent/types"; + +export const TODO_DAEMON_PROTOCOL_VERSION = 1; + +// ============================================================================= +// IPC Framing +// ============================================================================= + +export interface IpcRequest { + id: string; + type: string; + payload: unknown; +} + +export interface IpcSuccessResponse { + id: string; + ok: true; + payload: unknown; +} + +export interface IpcErrorResponse { + id: string; + ok: false; + error: { + code: string; + message: string; + }; +} + +export type IpcResponse = IpcSuccessResponse | IpcErrorResponse; + +export interface IpcEvent { + type: "event"; + event: string; + payload: unknown; +} + +// ============================================================================= +// Request / Response Payloads +// ============================================================================= + +export interface HelloRequest { + protocolVersion: number; + token: string; +} + +export interface HelloResponse { + protocolVersion: number; + 
daemonVersion: string; + daemonPid: number; + /** IDs of sessions the daemon is actively driving right now. */ + activeSessionIds: string[]; +} + +export interface StartRequest { + sessionId: string; + /** + * True when the caller is the scheduler waking a `ScheduleWakeup`- + * paused session back up. The engine consumes this marker to skip + * the "再開" banner and to send a short continuation prompt instead + * of replaying the original goal — see issue #240. + */ + fromScheduledWakeup?: boolean; +} + +export interface AbortRequest { + sessionId: string; +} + +export interface QueueInterventionRequest { + sessionId: string; + data: string; +} + +export interface ResumeWaitingRequest { + /** Session IDs the scheduler has already claimed (flipped to queued). */ + sessionIds: string[]; +} + +export type SettingsChangedRequest = Record; + +export type RehydrateRequest = Record; + +export interface ListActiveResponse { + sessionIds: string[]; +} + +export interface ShutdownRequest { + /** If true, the daemon SIGINTs all in-flight claude children before exiting. */ + killSessions?: boolean; +} + +export interface EmptyResponse { + success: true; +} + +// ============================================================================= +// Event Payloads (daemon → main) +// ============================================================================= + +/** Fired when the daemon writes to a `todo_sessions` row. */ +export interface SessionStateEventPayload { + session: SelectTodoSession; +} + +/** Fired when the daemon appends stream events for a session. 
*/ +export interface SessionStreamEventPayload { + sessionId: string; + events: TodoStreamEvent[]; +} + +// ============================================================================= +// Type Map +// ============================================================================= + +export type RequestTypeMap = { + hello: { request: HelloRequest; response: HelloResponse }; + start: { request: StartRequest; response: EmptyResponse }; + abort: { request: AbortRequest; response: EmptyResponse }; + queueIntervention: { + request: QueueInterventionRequest; + response: EmptyResponse; + }; + resumeWaiting: { request: ResumeWaitingRequest; response: EmptyResponse }; + settingsChanged: { + request: SettingsChangedRequest; + response: EmptyResponse; + }; + rehydrate: { request: RehydrateRequest; response: EmptyResponse }; + listActive: { request: undefined; response: ListActiveResponse }; + shutdown: { request: ShutdownRequest; response: EmptyResponse }; +}; + +export type EventTypeMap = { + sessionState: SessionStateEventPayload; + streamEvents: SessionStreamEventPayload; +}; diff --git a/apps/desktop/src/main/lib/tray/index.ts b/apps/desktop/src/main/lib/tray/index.ts index 278ed131fbb..eea0450088d 100644 --- a/apps/desktop/src/main/lib/tray/index.ts +++ b/apps/desktop/src/main/lib/tray/index.ts @@ -9,7 +9,8 @@ import { } from "electron"; import { loadToken } from "lib/trpc/routers/auth/utils/auth-functions"; import { env } from "main/env.main"; -import { focusMainWindow, quitApp } from "main/index"; +import { focusMainWindow, requestQuit } from "main/index"; +// FORK NOTE: upstream renamed host-service-manager → host-service-coordinator (#3250) import { getHostServiceCoordinator, type HostServiceStatusEvent, @@ -49,6 +50,11 @@ function getTrayIconPath(): string | null { } let tray: Tray | null = null; +// FORK NOTE: bump on each updateTrayMenu entry so overlapping async runs +// can drop stale results. 
Without this, a late-returning fetch would +// rebuild the menu from a snapshot of orgIds taken before a status change, +// re-introducing stopped services and the wrong Quit-mode variant. +let trayUpdateToken = 0; function createTrayIcon(): Electron.NativeImage | null { const iconPath = getTrayIconPath(); @@ -185,6 +191,7 @@ function buildHostServiceSubmenu( async function updateTrayMenu(): Promise { if (!tray) return; + const token = ++trayUpdateToken; const coordinator = getHostServiceCoordinator(); const orgIds = coordinator.getActiveOrganizationIds(); @@ -196,7 +203,9 @@ async function updateTrayMenu(): Promise { if (info) infos.set(orgId, info); } - if (!tray) return; + // Drop results if a newer updateTrayMenu has already started — otherwise + // an older snapshot can overwrite the newer one and show stale state. + if (!tray || token !== trayUpdateToken) return; const hasActive = orgIds.length > 0; const hostServiceLabel = hasActive @@ -228,10 +237,26 @@ async function updateTrayMenu(): Promise { }, }, { type: "separator" }, - { - label: "Quit Superset", - click: () => quitApp(), - }, + // FORK NOTE: fork supports two quit modes — release (keep host-services + // alive for reattach) vs stop (fully tear them down). Preserved from + // pre-#3458 fork tray. + ...(hasActive + ? 
[ + { + label: "Quit (Keep Services Running)", + click: () => requestQuit("release"), + }, + { + label: "Quit & Stop Services", + click: () => requestQuit("stop"), + }, + ] + : [ + { + label: "Quit", + click: () => requestQuit("release"), + }, + ]), ]); tray.setContextMenu(menu); diff --git a/apps/desktop/src/main/lib/vibrancy/emitter.ts b/apps/desktop/src/main/lib/vibrancy/emitter.ts new file mode 100644 index 00000000000..631000cab55 --- /dev/null +++ b/apps/desktop/src/main/lib/vibrancy/emitter.ts @@ -0,0 +1,25 @@ +import { EventEmitter } from "node:events"; +import type { VibrancyState } from "./index"; + +export const VIBRANCY_EVENTS = { + CHANGED: "vibrancy:changed", +} as const; + +type VibrancyEvents = { + [VIBRANCY_EVENTS.CHANGED]: [VibrancyState]; +}; + +export const vibrancyEmitter = new EventEmitter() as EventEmitter & { + on( + event: K, + listener: (...args: VibrancyEvents[K]) => void, + ): EventEmitter; + off( + event: K, + listener: (...args: VibrancyEvents[K]) => void, + ): EventEmitter; + emit( + event: K, + ...args: VibrancyEvents[K] + ): boolean; +}; diff --git a/apps/desktop/src/main/lib/vibrancy/index.ts b/apps/desktop/src/main/lib/vibrancy/index.ts new file mode 100644 index 00000000000..8c35904d4c3 --- /dev/null +++ b/apps/desktop/src/main/lib/vibrancy/index.ts @@ -0,0 +1,245 @@ +import { + isNativeBlurAvailable, + setWindowBlurRadius, +} from "@superset/macos-window-blur"; +import type { BrowserWindow } from "electron"; +import { PLATFORM } from "shared/constants"; +import { + DEFAULT_VIBRANCY_STATE, + VIBRANCY_BLUR_RADIUS_MAX, + VIBRANCY_BLUR_RADIUS_MIN, + VIBRANCY_OPACITY_MAX, + VIBRANCY_OPACITY_MIN, + type VibrancyBlurLevel, + type VibrancyState, +} from "shared/vibrancy-types"; + +export { + DEFAULT_VIBRANCY_STATE, + type VibrancyBlurLevel, + type VibrancyState, +} from "shared/vibrancy-types"; + +const BLUR_TO_ELECTRON_VIBRANCY: Record< + VibrancyBlurLevel, + "sidebar" | "header" | "content" | "fullscreen-ui" +> = { + subtle: 
"sidebar", + standard: "header", + strong: "content", + ultra: "fullscreen-ui", +}; + +// Ember dark / Superset light background colors used when vibrancy is off. +const OPAQUE_DARK = "#151110"; +const OPAQUE_LIGHT = "#ffffff"; + +const DARK_RGB = { r: 21, g: 17, b: 16 }; +const LIGHT_RGB = { r: 255, g: 255, b: 255 }; + +export function isVibrancySupported(): boolean { + return PLATFORM.IS_MAC; +} + +/** + * Clamp opacity to the supported range defined in shared/vibrancy-types. + */ +export function normalizeVibrancyState( + partial: Partial, + base: VibrancyState = DEFAULT_VIBRANCY_STATE, +): VibrancyState { + const opacity = + partial.opacity === undefined + ? base.opacity + : Math.max( + VIBRANCY_OPACITY_MIN, + Math.min(VIBRANCY_OPACITY_MAX, Math.round(partial.opacity)), + ); + const blurLevel: VibrancyBlurLevel = + partial.blurLevel && partial.blurLevel in BLUR_TO_ELECTRON_VIBRANCY + ? partial.blurLevel + : base.blurLevel; + const blurRadius = + partial.blurRadius === undefined + ? base.blurRadius + : Math.max( + VIBRANCY_BLUR_RADIUS_MIN, + Math.min(VIBRANCY_BLUR_RADIUS_MAX, Math.round(partial.blurRadius)), + ); + return { + enabled: partial.enabled ?? base.enabled, + opacity, + blurLevel, + blurRadius, + }; +} + +/** + * Whether the native CIGaussianBlur addon loaded successfully on this + * machine. When false, the vibrancy slider UI should fall back to the + * four-step blurLevel selection. + */ +export function isNativeContinuousBlurSupported(): boolean { + return isVibrancySupported() && isNativeBlurAvailable(); +} + +function toHexAlpha(opacityPercent: number): string { + const alpha = Math.round((opacityPercent / 100) * 255); + return alpha.toString(16).padStart(2, "0"); +} + +/** + * Build an #RRGGBBAA color string using the current theme brightness and the + * vibrancy opacity slider. `opacity` here means "how transparent the chrome + * becomes when vibrancy is active" — 0 = fully see-through, 100 = opaque. 
+ * + * When vibrancy is disabled we return a fully opaque color so the window + * renders identically to the pre-vibrancy build. + */ +export function computeBackgroundColor( + state: VibrancyState, + isDark: boolean, +): string { + if (!state.enabled) { + return isDark ? OPAQUE_DARK : OPAQUE_LIGHT; + } + const rgb = isDark ? DARK_RGB : LIGHT_RGB; + // Slider 100 = opaque; lower values = more transparent so desktop shows through. + const alphaHex = toHexAlpha(state.opacity); + const toHex = (n: number) => n.toString(16).padStart(2, "0"); + return `#${toHex(rgb.r)}${toHex(rgb.g)}${toHex(rgb.b)}${alphaHex}`; +} + +export function resolveVibrancyType( + state: VibrancyState, +): "sidebar" | "header" | "content" | "fullscreen-ui" | null { + if (!state.enabled) return null; + return BLUR_TO_ELECTRON_VIBRANCY[state.blurLevel]; +} + +/** + * Apply the current vibrancy state to a BrowserWindow. Only has effect on + * macOS — on other platforms this is a no-op so callers can invoke it + * unconditionally. + */ +export function applyVibrancy( + window: BrowserWindow, + state: VibrancyState, + isDark: boolean, +): void { + if (window.isDestroyed()) return; + if (!isVibrancySupported()) return; + + const vibrancyType = resolveVibrancyType(state); + const backgroundColor = computeBackgroundColor(state, isDark); + + window.setBackgroundColor(backgroundColor); + // Electron's setVibrancy accepts `null` to clear the effect — the type + // definition in Electron 30+ includes `string | null`, so the value + // returned by resolveVibrancyType can be passed through directly. + window.setVibrancy(vibrancyType); + + scheduleNativeBlur(window, state); +} + +// --- Native blur scheduling ---------------------------------------------- +// Each window tracks the "latest requested radius" plus a list of pending +// retry timers. When a new applyVibrancy call arrives we: +// 1. Update the latest radius for that window +// 2. Cancel any still-pending retries from older calls +// 3. 
Schedule a fresh burst of retries that all read from `latestRadius` +// This kills a subtle race where a user dragging the blur slider quickly +// would have an old value's 180ms retry land after a newer value was +// already applied, clobbering it. + +interface BlurSchedule { + latestRadius: number; + timers: ReturnType[]; +} + +const blurSchedules = new WeakMap(); + +function scheduleNativeBlur(window: BrowserWindow, state: VibrancyState): void { + if (!isNativeBlurAvailable()) return; + + const radius = state.enabled ? state.blurRadius : 0; + let schedule = blurSchedules.get(window); + if (!schedule) { + schedule = { latestRadius: radius, timers: [] }; + blurSchedules.set(window, schedule); + } else { + schedule.latestRadius = radius; + for (const timer of schedule.timers) clearTimeout(timer); + schedule.timers.length = 0; + } + + const handle = window.getNativeWindowHandle(); + const apply = (): void => { + if (window.isDestroyed()) return; + const current = blurSchedules.get(window); + if (!current) return; + try { + setWindowBlurRadius(handle, current.latestRadius); + } catch (error) { + console.warn("[vibrancy] setWindowBlurRadius failed:", error); + } + }; + + // Immediate apply + retries that stretch long enough to beat the + // NSVisualEffectView's own lazy refresh cycle. + apply(); + const delays = [16, 64, 180, 480, 960]; + for (const delay of delays) { + const timer = setTimeout(() => { + if (!schedule) return; + const index = schedule.timers.indexOf(timer); + if (index >= 0) schedule.timers.splice(index, 1); + apply(); + }, delay); + schedule.timers.push(timer); + } +} + +/** + * Options that callers should spread into the BrowserWindow constructor on + * macOS so that vibrancy can later be toggled dynamically via + * `setVibrancy` / `setBackgroundColor` without recreating the window. 
+ * + * `transparent: true` is required at construction time — it cannot be + * toggled later — so we always opt in on macOS even when the user has + * vibrancy disabled. The opaque background color we set keeps the window + * visually identical to the pre-vibrancy build until the user enables it. + */ +export function getInitialWindowOptions( + state: VibrancyState, + isDark: boolean, +): { + transparent?: boolean; + vibrancy?: "sidebar" | "header" | "content" | "fullscreen-ui"; + visualEffectState?: "followWindow" | "active" | "inactive"; + backgroundColor: string; +} { + if (!isVibrancySupported()) { + return { + backgroundColor: isDark ? OPAQUE_DARK : OPAQUE_LIGHT, + }; + } + + const backgroundColor = computeBackgroundColor(state, isDark); + // Always attach NSVisualEffectView at construction time, even when the + // user has vibrancy disabled. The opaque backgroundColor fully covers + // the vibrancy layer while it's off, but having it already mounted + // means the first OFF→ON toggle can just change setBackgroundColor's + // alpha — no window recreation / restart required. Previously we only + // attached vibrancy when enabled, which meant `setVibrancy` first-time + // attachment wouldn't fully take effect until next launch. + const vibrancyType = + resolveVibrancyType(state) ?? BLUR_TO_ELECTRON_VIBRANCY[state.blurLevel]; + + return { + transparent: true, + vibrancy: vibrancyType, + visualEffectState: "active", + backgroundColor, + }; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/commands.ts b/apps/desktop/src/main/lib/vscode-shim/api/commands.ts new file mode 100644 index 00000000000..bcf0b3d0a8e --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/commands.ts @@ -0,0 +1,215 @@ +/** + * VS Code commands API shim. 
+ */ + +import { shimLog, shimWarn } from "./debug-log"; +import { Disposable } from "./event-emitter"; +import { Uri } from "./uri"; +import { fireOpenDiff, fireOpenFile } from "./window"; +import { resolveTextDocumentContent } from "./workspace"; + +const UNHANDLED = Symbol("unhandled"); + +function toUri( + value: + | Uri + | { + scheme?: string; + authority?: string; + path?: string; + query?: string; + fragment?: string; + } + | undefined, +): Uri | undefined { + if (!value) { + return undefined; + } + if (value instanceof Uri) { + return value; + } + if (value.scheme) { + return Uri.from({ + scheme: value.scheme, + authority: value.authority, + path: value.path, + query: value.query, + fragment: value.fragment, + }); + } + return undefined; +} + +/** + * Handle VS Code built-in commands that extensions expect to work. + * Returns UNHANDLED if the command is not a known built-in. + */ +function handleBuiltinCommand( + command: string, + args: unknown[], +): unknown | typeof UNHANDLED { + switch (command) { + // Diff view (Claude Code / Codex uses this for file diffs) + case "vscode.diff": { + const leftUri = args[0] as + | { + fsPath?: string; + toString?(): string; + scheme?: string; + authority?: string; + path?: string; + query?: string; + fragment?: string; + } + | undefined; + const rightUri = args[1] as + | { fsPath?: string; toString?(): string } + | undefined; + const title = args[2] as string | undefined; + const left = leftUri?.fsPath ?? leftUri?.toString?.() ?? ""; + const right = rightUri?.fsPath ?? rightUri?.toString?.() ?? 
""; + shimLog(`[vscode-shim] vscode.diff called: ${left} → ${right}`); + if (!left || !right) { + return undefined; + } + + const resolvedLeftUri = toUri(leftUri); + if (!resolvedLeftUri || resolvedLeftUri.scheme === "file") { + fireOpenDiff(left, right, title); + return undefined; + } + + return resolveTextDocumentContent(resolvedLeftUri) + .then((leftContent) => { + fireOpenDiff(left, right, title, leftContent); + return undefined; + }) + .catch((error) => { + shimWarn( + "[vscode-shim] Failed to resolve diff baseline content:", + error, + ); + fireOpenDiff(left, right, title); + return undefined; + }); + } + + // Open file + case "vscode.open": { + const uri = args[0] as + | { fsPath?: string; scheme?: string; toString?(): string } + | undefined; + shimLog(`[vscode-shim] vscode.open called with`, uri); + if (uri?.scheme === "file" && uri.fsPath) { + fireOpenFile(uri.fsPath); + } + return undefined; + } + + case "vscode.openFolder": + case "kimi.webview.focus": + return undefined; + + // Reveal file in OS file manager + case "revealFileInOS": + case "revealInExplorer": { + try { + const uri = args[0] as { fsPath?: string }; + if (uri?.fsPath) { + const { shell } = require("electron"); + shell.showItemInFolder(uri.fsPath); + } + } catch {} + return undefined; + } + + // Focus editor + case "workbench.action.focusFirstEditorGroup": + case "workbench.action.lockEditorGroup": + return undefined; + + // Reload window (Codex uses this) + case "workbench.action.reloadWindow": { + try { + const { BrowserWindow } = require("electron"); + const win = BrowserWindow.getFocusedWindow(); + win?.reload(); + } catch {} + return undefined; + } + + // Open settings + case "workbench.action.openSettings": + case "workbench.action.openGlobalKeybindings": + case "workbench.action.showCommands": + // These don't have direct Superset equivalents + return undefined; + + // Close active editor + case "workbench.action.revertAndCloseActiveEditor": + case 
"workbench.action.moveEditorToNewWindow": + return undefined; + + // Speech/dictation (Claude Code) + case "workbench.action.editorDictation.start": + case "workbench.action.editorDictation.stop": + return undefined; + + // Notebook (Claude Code) + case "notebook.cell.execute": + return undefined; + + default: + return UNHANDLED; + } +} + +type CommandHandler = (...args: unknown[]) => unknown; + +const registry = new Map(); +const contextState = new Map(); + +export function getContextValue(key: string): unknown { + return contextState.get(key); +} + +export const commands = { + registerCommand( + command: string, + callback: CommandHandler, + _thisArg?: unknown, + ): Disposable { + registry.set(command, callback); + return new Disposable(() => { + registry.delete(command); + }); + }, + + async executeCommand( + command: string, + ...args: unknown[] + ): Promise { + if (command === "setContext") { + const [key, value] = args; + contextState.set(key as string, value); + return undefined as T; + } + + // Handle VS Code built-in commands + const builtinResult = handleBuiltinCommand(command, args); + if (builtinResult !== UNHANDLED) { + return builtinResult as T; + } + + const handler = registry.get(command); + if (!handler) { + shimWarn(`[vscode-shim] Command not found: ${command}`); + return undefined as T; + } + return (await handler(...args)) as T; + }, + + getCommands(_filterInternal?: boolean): Promise { + return Promise.resolve([...registry.keys()]); + }, +}; diff --git a/apps/desktop/src/main/lib/vscode-shim/api/configuration.ts b/apps/desktop/src/main/lib/vscode-shim/api/configuration.ts new file mode 100644 index 00000000000..d5d898cf092 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/configuration.ts @@ -0,0 +1,119 @@ +/** + * VS Code workspace configuration shim. 
+ */ + +import fs from "node:fs"; +import path from "node:path"; +import type { ExtensionManifest } from "../types"; +import { EventEmitter } from "./event-emitter"; + +interface ConfigurationChangeEvent { + affectsConfiguration(section: string, _scope?: unknown): boolean; +} + +const _onDidChangeConfiguration = new EventEmitter(); +export const onDidChangeConfiguration = _onDidChangeConfiguration.event; + +function getUserDataPath(): string { + try { + const { app } = require("electron"); + return app.getPath("userData"); + } catch { + return path.join(require("node:os").homedir(), ".superset-desktop"); + } +} + +const configFilePath = path.join( + getUserDataPath(), + "vscode-extension-settings.json", +); + +let configData: Record = {}; + +function loadConfig(): void { + try { + if (fs.existsSync(configFilePath)) { + configData = JSON.parse(fs.readFileSync(configFilePath, "utf-8")); + } + } catch { + configData = {}; + } +} + +function saveConfig(): void { + try { + fs.mkdirSync(path.dirname(configFilePath), { recursive: true }); + fs.writeFileSync(configFilePath, JSON.stringify(configData, null, 2)); + } catch (err) { + console.error("[vscode-shim] Failed to save config:", err); + } +} + +loadConfig(); + +/** Merge defaults from extension contributes.configuration into config */ +export function registerExtensionDefaults(manifest: ExtensionManifest): void { + const configs = manifest.contributes?.configuration; + if (!configs) return; + const schemas = Array.isArray(configs) ? configs : [configs]; + for (const schema of schemas) { + if (!schema.properties) continue; + for (const [key, prop] of Object.entries(schema.properties)) { + if (prop.default !== undefined && configData[key] === undefined) { + configData[key] = prop.default; + } + } + } +} + +class WorkspaceConfiguration { + private _section: string; + + constructor(section: string) { + this._section = section; + } + + get(key: string, defaultValue?: T): T { + const fullKey = this._section ? 
`${this._section}.${key}` : key;
+    const value = configData[fullKey];
+    return (value !== undefined ? value : defaultValue) as T;
+  }
+
+  has(key: string): boolean {
+    const fullKey = this._section ? `${this._section}.${key}` : key;
+    return fullKey in configData;
+  }
+
+  inspect(
+    key: string,
+  ): { key: string; defaultValue?: T; globalValue?: T } | undefined {
+    const fullKey = this._section ? `${this._section}.${key}` : key;
+    return {
+      key: fullKey,
+      globalValue: configData[fullKey] as T | undefined,
+    };
+  }
+
+  async update(
+    key: string,
+    value: unknown,
+    _configurationTarget?: unknown,
+    _overrideInLanguage?: boolean,
+  ): Promise {
+    const fullKey = this._section ? `${this._section}.${key}` : key;
+    configData[fullKey] = value;
+    saveConfig();
+    _onDidChangeConfiguration.fire({
+      // Match VS Code semantics: a change to `fullKey` affects `section` when
+      // they are equal or one is a dotted-segment prefix of the other. A bare
+      // startsWith would wrongly match e.g. section "editor.f" against key
+      // "editor.fontSize", and would never report a parent-section change as
+      // affecting a child section.
+      affectsConfiguration(section: string) {
+        return (
+          fullKey === section ||
+          fullKey.startsWith(`${section}.`) ||
+          section.startsWith(`${fullKey}.`)
+        );
+      },
+    });
+  }
+}
+
+export function getConfiguration(
+  section?: string,
+  _scope?: unknown,
+): WorkspaceConfiguration {
+  return new WorkspaceConfiguration(section ?? "");
+}
diff --git a/apps/desktop/src/main/lib/vscode-shim/api/debug-log.ts b/apps/desktop/src/main/lib/vscode-shim/api/debug-log.ts
new file mode 100644
index 00000000000..4adf286e635
--- /dev/null
+++ b/apps/desktop/src/main/lib/vscode-shim/api/debug-log.ts
@@ -0,0 +1,21 @@
+/**
+ * Conditional debug logger for vscode-shim.
+ * Logs are shown in development mode (bun dev) but suppressed in production builds.
+ */ + +const IS_DEV = + process.env.NODE_ENV === "development" || + process.env.DEBUG_VSCODE_SHIM === "1"; + +export function shimLog(...args: unknown[]): void { + if (IS_DEV) console.log(...args); +} + +export function shimWarn(...args: unknown[]): void { + if (IS_DEV) console.warn(...args); +} + +export function shimError(...args: unknown[]): void { + // Always log errors + console.error(...args); +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/event-emitter.ts b/apps/desktop/src/main/lib/vscode-shim/api/event-emitter.ts new file mode 100644 index 00000000000..d66779ad4f4 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/event-emitter.ts @@ -0,0 +1,91 @@ +/** + * VS Code EventEmitter and Disposable shim. + */ + +export type Event = ( + listener: (e: T) => unknown, + thisArgs?: unknown, + disposables?: Disposable[], +) => Disposable; + +export class Disposable { + private _callOnDispose: (() => void) | undefined; + + constructor(callOnDispose: () => void) { + this._callOnDispose = callOnDispose; + } + + static from(...disposables: { dispose(): unknown }[]): Disposable { + return new Disposable(() => { + for (const d of disposables) { + d.dispose(); + } + }); + } + + dispose(): void { + this._callOnDispose?.(); + this._callOnDispose = undefined; + } +} + +export class EventEmitter { + private _listeners: Array<{ fn: (e: T) => unknown; thisArgs?: unknown }> = []; + private _disposed = false; + + readonly event: Event = ( + listener: (e: T) => unknown, + thisArgs?: unknown, + disposables?: Disposable[], + ): Disposable => { + const entry = { fn: listener, thisArgs }; + this._listeners.push(entry); + const disposable = new Disposable(() => { + const idx = this._listeners.indexOf(entry); + if (idx >= 0) this._listeners.splice(idx, 1); + }); + if (disposables) disposables.push(disposable); + return disposable; + }; + + fire(data: T): void { + if (this._disposed) return; + for (const { fn, thisArgs } of [...this._listeners]) { + fn.call(thisArgs, 
data); + } + } + + dispose(): void { + this._disposed = true; + this._listeners.length = 0; + } +} + +export class CancellationTokenSource { + private _emitter = new EventEmitter(); + private _isCancelled = false; + + readonly token: CancellationToken = { + isCancellationRequested: false, + onCancellationRequested: this._emitter.event, + }; + + cancel(): void { + if (!this._isCancelled) { + this._isCancelled = true; + ( + this.token as { isCancellationRequested: boolean } + ).isCancellationRequested = true; + this._emitter.fire(undefined as undefined); + } + } + + dispose(): void { + this._emitter.dispose(); + } +} + +export interface CancellationToken { + readonly isCancellationRequested: boolean; + readonly onCancellationRequested: Event; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/extension-context.ts b/apps/desktop/src/main/lib/vscode-shim/api/extension-context.ts new file mode 100644 index 00000000000..2bac13360b5 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/extension-context.ts @@ -0,0 +1,246 @@ +/** + * VS Code ExtensionContext shim. + */ + +import fs from "node:fs"; +import path from "node:path"; + +function getUserDataPath(): string { + try { + return require("electron").app.getPath("userData"); + } catch { + return require("node:path").join( + require("node:os").homedir(), + ".superset-desktop", + ); + } +} + +import type { ExtensionManifest } from "../types"; +import { type Disposable, type Event, EventEmitter } from "./event-emitter"; +import { Uri } from "./uri"; + +class Memento { + private _data: Record; + private _filePath: string; + + constructor(filePath: string) { + this._filePath = filePath; + try { + this._data = fs.existsSync(filePath) + ? JSON.parse(fs.readFileSync(filePath, "utf-8")) + : {}; + } catch { + this._data = {}; + } + } + + get(key: string, defaultValue?: T): T { + const val = this._data[key]; + return (val !== undefined ? 
val : defaultValue) as T; + } + + async update(key: string, value: unknown): Promise { + if (value === undefined) { + delete this._data[key]; + } else { + this._data[key] = value; + } + fs.mkdirSync(path.dirname(this._filePath), { recursive: true }); + fs.writeFileSync(this._filePath, JSON.stringify(this._data, null, 2)); + } + + keys(): readonly string[] { + return Object.keys(this._data); + } +} + +function getSafeStorage(): { + encryptString(plainText: string): Buffer; + decryptString(encrypted: Buffer): string; + isEncryptionAvailable(): boolean; +} | null { + try { + // eslint-disable-next-line @typescript-eslint/no-require-imports + return require("electron").safeStorage as { + encryptString(plainText: string): Buffer; + decryptString(encrypted: Buffer): string; + isEncryptionAvailable(): boolean; + }; + } catch { + return null; + } +} + +class SecretStorage { + private _data = new Map(); + private _onDidChange = new EventEmitter<{ key: string }>(); + private _filePath: string; + readonly onDidChange: Event<{ key: string }> = this._onDidChange.event; + + constructor(filePath: string) { + this._filePath = filePath; + try { + if (fs.existsSync(filePath)) { + const raw = fs.readFileSync(filePath, "utf-8"); + const parsed = JSON.parse(raw) as Record; + const safeStorage = getSafeStorage(); + for (const [k, v] of Object.entries(parsed)) { + try { + if (safeStorage?.isEncryptionAvailable()) { + const buf = Buffer.from(v, "base64"); + this._data.set(k, safeStorage.decryptString(buf)); + } else { + this._data.set(k, v); + } + } catch { + // If decryption fails (e.g. 
key changed), store as-is + this._data.set(k, v); + } + } + } + } catch {} + } + + private _persist(): void { + try { + const safeStorage = getSafeStorage(); + const obj: Record = {}; + for (const [k, v] of this._data) { + if (safeStorage?.isEncryptionAvailable()) { + obj[k] = safeStorage.encryptString(v).toString("base64"); + } else { + obj[k] = v; + } + } + fs.mkdirSync(path.dirname(this._filePath), { recursive: true }); + fs.writeFileSync(this._filePath, JSON.stringify(obj, null, 2)); + } catch {} + } + + async get(key: string): Promise { + return this._data.get(key); + } + + async store(key: string, value: string): Promise { + this._data.set(key, value); + this._persist(); + this._onDidChange.fire({ key }); + } + + async delete(key: string): Promise { + this._data.delete(key); + this._persist(); + this._onDidChange.fire({ key }); + } +} + +interface EnvironmentVariableCollection { + persistent: boolean; + description: string | undefined; + replace(variable: string, value: string, options?: unknown): void; + append(variable: string, value: string, options?: unknown): void; + prepend(variable: string, value: string, options?: unknown): void; + get(variable: string): unknown; + delete(variable: string): void; + clear(): void; + forEach( + callback: (variable: string, mutator: unknown, collection: unknown) => void, + ): void; + [Symbol.iterator](): Iterator<[string, unknown]>; +} + +function createEnvironmentVariableCollection(): EnvironmentVariableCollection { + const vars = new Map(); + return { + persistent: true, + description: undefined, + replace(variable: string, value: string) { + vars.set(variable, { type: 1, value }); + }, + append(variable: string, value: string) { + vars.set(variable, { type: 2, value }); + }, + prepend(variable: string, value: string) { + vars.set(variable, { type: 3, value }); + }, + get(variable: string) { + return vars.get(variable); + }, + delete(variable: string) { + vars.delete(variable); + }, + clear() { + vars.clear(); + }, + 
forEach(callback) { + for (const [k, v] of vars) callback(k, v, this); + }, + *[Symbol.iterator]() { + yield* vars.entries(); + }, + }; +} + +export interface VscodeExtensionContext { + subscriptions: Disposable[]; + extensionPath: string; + extensionUri: Uri; + globalState: Memento; + workspaceState: Memento; + secrets: SecretStorage; + storageUri: Uri | undefined; + globalStorageUri: Uri; + logUri: Uri; + storagePath: string | undefined; + globalStoragePath: string; + logPath: string; + extensionMode: number; + environmentVariableCollection: EnvironmentVariableCollection; + extension: { + id: string; + extensionPath: string; + packageJSON: ExtensionManifest; + }; + asAbsolutePath(relativePath: string): string; +} + +export function createExtensionContext( + extensionId: string, + extensionPath: string, + manifest: ExtensionManifest, +): VscodeExtensionContext { + const storageBase = path.join(getUserDataPath(), "vscode-extensions"); + const globalStoragePath = path.join(storageBase, extensionId, "global"); + const storagePath = path.join(storageBase, extensionId, "workspace"); + const logPath = path.join(storageBase, extensionId, "logs"); + + fs.mkdirSync(globalStoragePath, { recursive: true }); + fs.mkdirSync(storagePath, { recursive: true }); + fs.mkdirSync(logPath, { recursive: true }); + + return { + subscriptions: [], + extensionPath, + extensionUri: Uri.file(extensionPath), + globalState: new Memento(path.join(globalStoragePath, "state.json")), + workspaceState: new Memento(path.join(storagePath, "state.json")), + secrets: new SecretStorage(path.join(globalStoragePath, "secrets.json")), + storageUri: Uri.file(storagePath), + globalStorageUri: Uri.file(globalStoragePath), + logUri: Uri.file(logPath), + storagePath, + globalStoragePath, + logPath, + extensionMode: 1, + environmentVariableCollection: createEnvironmentVariableCollection(), + extension: { + id: extensionId, + extensionPath, + packageJSON: manifest, + }, + asAbsolutePath(relativePath: string): 
string { + return path.join(extensionPath, relativePath); + }, + }; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.test.ts b/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.test.ts new file mode 100644 index 00000000000..d1a7451b700 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.test.ts @@ -0,0 +1,161 @@ +import { describe, expect, it } from "bun:test"; +import { + compileGlobMatchers, + compileGlobPatterns, + directoryMayContainMatches, + expandBracePatterns, + globToRegExp, + matchesAnyGlob, + normalizeGlobPath, +} from "./glob-utils"; + +describe("normalizeGlobPath", () => { + it("leaves forward slashes unchanged", () => { + expect(normalizeGlobPath("src/deep/file.ts")).toBe("src/deep/file.ts"); + }); + + it("normalizes platform separator to forward slashes", () => { + const sep = process.platform === "win32" ? "\\" : "/"; + const input = `src${sep}deep${sep}file.ts`; + expect(normalizeGlobPath(input)).toBe("src/deep/file.ts"); + }); +}); + +describe("globToRegExp", () => { + it("matches literal paths", () => { + const re = globToRegExp("src/index.ts"); + expect(re.test("src/index.ts")).toBe(true); + expect(re.test("src/other.ts")).toBe(false); + }); + + it("matches single * (non-separator)", () => { + const re = globToRegExp("*.ts"); + expect(re.test("foo.ts")).toBe(true); + expect(re.test("bar.ts")).toBe(true); + expect(re.test("dir/foo.ts")).toBe(false); + }); + + it("matches **/*.ts recursively", () => { + const re = globToRegExp("**/*.ts"); + expect(re.test("foo.ts")).toBe(true); + expect(re.test("src/foo.ts")).toBe(true); + expect(re.test("src/deep/foo.ts")).toBe(true); + expect(re.test("foo.js")).toBe(false); + }); + + it("matches **/ prefix", () => { + const re = globToRegExp("**/node_modules"); + expect(re.test("node_modules")).toBe(true); + expect(re.test("packages/foo/node_modules")).toBe(true); + }); + + it("matches ? 
as single non-separator", () => { + const re = globToRegExp("file?.ts"); + expect(re.test("file1.ts")).toBe(true); + expect(re.test("fileA.ts")).toBe(true); + expect(re.test("file.ts")).toBe(false); + expect(re.test("file12.ts")).toBe(false); + }); + + it("matches character class [...]", () => { + const re = globToRegExp("file[0-9].ts"); + expect(re.test("file0.ts")).toBe(true); + expect(re.test("file9.ts")).toBe(true); + expect(re.test("filea.ts")).toBe(false); + }); + + it("escapes special regex chars", () => { + const re = globToRegExp("file.name.ts"); + expect(re.test("file.name.ts")).toBe(true); + expect(re.test("fileXname.ts")).toBe(false); + }); + + it("handles unclosed [ as literal", () => { + const re = globToRegExp("file[.ts"); + expect(re.test("file[.ts")).toBe(true); + }); +}); + +describe("expandBracePatterns", () => { + it("expands simple braces", () => { + expect(expandBracePatterns("{a,b,c}")).toEqual(["a", "b", "c"]); + }); + + it("expands braces with prefix and suffix", () => { + expect(expandBracePatterns("src/*.{ts,js}")).toEqual([ + "src/*.ts", + "src/*.js", + ]); + }); + + it("handles nested braces", () => { + expect(expandBracePatterns("{a,{b,c}}")).toEqual(["a", "b", "c"]); + }); + + it("returns pattern unchanged if no braces", () => { + expect(expandBracePatterns("**/*.ts")).toEqual(["**/*.ts"]); + }); + + it("handles escaped braces", () => { + expect(expandBracePatterns("\\{a,b}")).toEqual(["\\{a,b}"]); + }); +}); + +describe("compileGlobPatterns", () => { + it("returns empty for null/undefined/empty", () => { + expect(compileGlobPatterns(null)).toEqual([]); + expect(compileGlobPatterns(undefined)).toEqual([]); + expect(compileGlobPatterns("")).toEqual([]); + expect(compileGlobPatterns(" ")).toEqual([]); + }); + + it("returns single pattern for simple glob", () => { + expect(compileGlobPatterns("**/*.ts")).toEqual(["**/*.ts"]); + }); + + it("expands brace patterns", () => { + expect(compileGlobPatterns("{**/*.ts,**/*.js}")).toEqual([ + 
"**/*.ts", + "**/*.js", + ]); + }); +}); + +describe("matchesAnyGlob", () => { + it("returns false for empty matchers", () => { + expect(matchesAnyGlob([], "foo.ts")).toBe(false); + }); + + it("matches with compiled matchers", () => { + const matchers = compileGlobMatchers("**/*.ts"); + expect(matchesAnyGlob(matchers, "src/foo.ts")).toBe(true); + expect(matchesAnyGlob(matchers, "src/foo.js")).toBe(false); + }); + + it("matches default exclude globs", () => { + const matchers = compileGlobMatchers("{**/.git,**/node_modules}"); + expect(matchesAnyGlob(matchers, "node_modules")).toBe(true); + expect(matchesAnyGlob(matchers, "packages/foo/node_modules")).toBe(true); + expect(matchesAnyGlob(matchers, ".git")).toBe(true); + expect(matchesAnyGlob(matchers, "src/index.ts")).toBe(false); + }); +}); + +describe("directoryMayContainMatches", () => { + it("returns true for empty patterns", () => { + expect(directoryMayContainMatches("src", [])).toBe(true); + }); + + it("returns true when directory matches static prefix", () => { + expect(directoryMayContainMatches("src", ["src/**/*.ts"])).toBe(true); + expect(directoryMayContainMatches("src/deep", ["src/**/*.ts"])).toBe(true); + }); + + it("returns false when directory diverges from prefix", () => { + expect(directoryMayContainMatches("dist", ["src/**/*.ts"])).toBe(false); + }); + + it("returns true for patterns without static prefix", () => { + expect(directoryMayContainMatches("anything", ["**/*.ts"])).toBe(true); + }); +}); diff --git a/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.ts b/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.ts new file mode 100644 index 00000000000..bc1c971037f --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/glob-utils.ts @@ -0,0 +1,263 @@ +import path from "node:path"; + +/** + * Minimal glob-to-regexp utilities for the VS Code workspace shim. 
+ * Handles the subset of glob syntax that VS Code extensions commonly use: + * `*`, `**`, `?`, `[...]`, and `{a,b}` brace expansion. + */ + +export function normalizeGlobPath(value: string): string { + return value.split(path.sep).join("/"); +} + +export function escapeRegexLiteral(value: string): string { + return value.replace(/[|\\{}()[\]^$+?.]/g, "\\$&"); +} + +export function globToRegExp(glob: string): RegExp { + let source = "^"; + + for (let index = 0; index < glob.length; index += 1) { + const char = glob[index]; + + if (char === "\\") { + const next = glob[index + 1]; + if (next) { + source += escapeRegexLiteral(next); + index += 1; + } else { + source += "\\\\"; + } + continue; + } + + if (char === "*") { + if (glob[index + 1] === "*") { + while (glob[index + 1] === "*") { + index += 1; + } + if (glob[index + 1] === "/") { + source += "(?:.*/)?"; + index += 1; + } else { + source += ".*"; + } + } else { + source += "[^/]*"; + } + continue; + } + + if (char === "?") { + source += "[^/]"; + continue; + } + + if (char === "[") { + const closingIndex = glob.indexOf("]", index + 1); + if (closingIndex === -1) { + source += "\\["; + } else { + source += glob.slice(index, closingIndex + 1); + index = closingIndex; + } + continue; + } + + source += escapeRegexLiteral(char); + } + + source += "$"; + return new RegExp(source); +} + +export function findFirstBraceRange( + pattern: string, +): { start: number; end: number; body: string } | null { + let braceStart = -1; + let depth = 0; + + for (let index = 0; index < pattern.length; index += 1) { + const char = pattern[index]; + if (char === "\\") { + index += 1; + continue; + } + if (char === "{") { + if (depth === 0) { + braceStart = index; + } + depth += 1; + continue; + } + if (char === "}") { + if (depth === 0 || braceStart < 0) { + continue; + } + depth -= 1; + if (depth === 0) { + return { + start: braceStart, + end: index, + body: pattern.slice(braceStart + 1, index), + }; + } + } + } + + return null; +} + 
+export function splitBraceOptions(body: string): string[] { + const options: string[] = []; + let depth = 0; + let current = ""; + + for (let index = 0; index < body.length; index += 1) { + const char = body[index]; + if (char === "\\") { + current += char; + if (index + 1 < body.length) { + current += body[index + 1]; + index += 1; + } + continue; + } + if (char === "{") { + depth += 1; + current += char; + continue; + } + if (char === "}") { + depth = Math.max(0, depth - 1); + current += char; + continue; + } + if (char === "," && depth === 0) { + options.push(current); + current = ""; + continue; + } + current += char; + } + + options.push(current); + return options; +} + +export function expandBracePatterns(pattern: string): string[] { + const braceRange = findFirstBraceRange(pattern); + if (!braceRange) { + return [pattern]; + } + + const prefix = pattern.slice(0, braceRange.start); + const suffix = pattern.slice(braceRange.end + 1); + const options = splitBraceOptions(braceRange.body); + + return options.flatMap((option) => + expandBracePatterns(`${prefix}${option}${suffix}`), + ); +} + +export function compileGlobPatterns( + pattern: string | null | undefined, +): string[] { + if (!pattern) { + return []; + } + + const normalized = pattern.trim(); + if (!normalized) { + return []; + } + + return expandBracePatterns(normalized) + .map((entry) => normalizeGlobPath(entry.trim())) + .filter(Boolean); +} + +export function compileGlobMatchers( + pattern: string | null | undefined, +): RegExp[] { + return compileGlobPatterns(pattern).map((entry) => globToRegExp(entry)); +} + +export function matchesAnyGlob( + matchers: RegExp[], + targetPath: string, +): boolean { + if (matchers.length === 0) { + return false; + } + + const normalizedTarget = normalizeGlobPath(targetPath); + return matchers.some((matcher) => matcher.test(normalizedTarget)); +} + +export function splitGlobSegments(pattern: string): string[] { + return normalizeGlobPath(pattern) + .split("/") + 
.map((segment) => segment.trim()) + .filter(Boolean); +} + +export function hasGlobMeta(segment: string): boolean { + let escaped = false; + + for (const char of segment) { + if (!escaped && char === "\\") { + escaped = true; + continue; + } + if (!escaped && (char === "*" || char === "?" || char === "[")) { + return true; + } + escaped = false; + } + + return false; +} + +export function getStaticGlobPrefixSegments(pattern: string): string[] { + const prefix: string[] = []; + + for (const segment of splitGlobSegments(pattern)) { + if (segment === "**" || hasGlobMeta(segment)) { + break; + } + prefix.push(segment); + } + + return prefix; +} + +export function directoryMayContainMatches( + relativeDirectory: string, + includePatterns: string[], +): boolean { + if (includePatterns.length === 0) { + return true; + } + + const directorySegments = splitGlobSegments(relativeDirectory); + + return includePatterns.some((pattern) => { + const prefixSegments = getStaticGlobPrefixSegments(pattern); + if (prefixSegments.length === 0) { + return true; + } + + const commonLength = Math.min( + directorySegments.length, + prefixSegments.length, + ); + for (let index = 0; index < commonLength; index += 1) { + if (directorySegments[index] !== prefixSegments[index]) { + return false; + } + } + + return true; + }); +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/output-channel.ts b/apps/desktop/src/main/lib/vscode-shim/api/output-channel.ts new file mode 100644 index 00000000000..99a47443b52 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/output-channel.ts @@ -0,0 +1,100 @@ +/** + * VS Code OutputChannel shim. 
+ */ + +import { shimLog } from "./debug-log"; +import { Disposable } from "./event-emitter"; + +export class OutputChannel { + readonly name: string; + private _lines: string[] = []; + private _disposed = false; + + constructor(name: string) { + this.name = name; + } + + append(value: string): void { + if (this._disposed) return; + const last = this._lines.length - 1; + if (last >= 0) { + this._lines[last] += value; + } else { + this._lines.push(value); + } + } + + appendLine(value: string): void { + if (this._disposed) return; + shimLog(`[${this.name}] ${value}`); + this._lines.push(value); + } + + clear(): void { + this._lines.length = 0; + } + + show(_preserveFocus?: boolean): void { + // In future, could switch to an output tab in the UI + } + + hide(): void { + // noop + } + + replace(value: string): void { + this._lines = [value]; + } + + dispose(): void { + this._disposed = true; + this._lines.length = 0; + } +} + +export class LogOutputChannel extends OutputChannel { + trace(message: string, ..._args: unknown[]): void { + this.appendLine(`[TRACE] ${message}`); + } + + debug(message: string, ..._args: unknown[]): void { + this.appendLine(`[DEBUG] ${message}`); + } + + info(message: string, ..._args: unknown[]): void { + this.appendLine(`[INFO] ${message}`); + } + + warn(message: string, ..._args: unknown[]): void { + this.appendLine(`[WARN] ${message}`); + } + + error(error: string | Error, ..._args: unknown[]): void { + const msg = error instanceof Error ? error.message : error; + this.appendLine(`[ERROR] ${msg}`); + } +} + +const channels = new Map(); + +export function createOutputChannel( + name: string, + options?: { log: true } | string, +): OutputChannel { + const existing = channels.get(name); + if (existing) return existing; + + const channel = + options && typeof options === "object" && options.log + ? 
new LogOutputChannel(name) + : new OutputChannel(name); + channels.set(name, channel); + return channel; +} + +export function getOutputChannelDisposable(): Disposable { + return new Disposable(() => { + for (const ch of channels.values()) ch.dispose(); + channels.clear(); + }); +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/protocol-handler.ts b/apps/desktop/src/main/lib/vscode-shim/api/protocol-handler.ts new file mode 100644 index 00000000000..7935e2e8d15 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/protocol-handler.ts @@ -0,0 +1,86 @@ +/** + * Electron protocol handler for serving VS Code extension webview resources. + * + * Registers `vscode-webview-resource://` protocol to serve local files + * from extension directories. The main webview HTML is served by + * webview-server.ts instead (HTTP on localhost). + */ + +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { + createFileProtocolResponse, + MEDIA_MIME_TYPES, +} from "../../file-streaming"; +import { shimLog } from "./debug-log"; + +/** Allowed base directories for serving extension resources */ +const ALLOWED_ROOTS: string[] = [ + path.join(os.homedir(), ".vscode", "extensions"), + path.join(os.homedir(), ".vscode-insiders", "extensions"), +]; + +function isPathAllowed(filePath: string): boolean { + const resolved = path.resolve(filePath); + return ALLOWED_ROOTS.some( + (root) => resolved === root || resolved.startsWith(root + path.sep), + ); +} + +const MIME_TYPES: Record = { + ".html": "text/html", + ".css": "text/css", + ".js": "application/javascript", + ".mjs": "application/javascript", + ".json": "application/json", + ".svg": "image/svg+xml", + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".woff": "font/woff", + ".woff2": "font/woff2", + ".ttf": "font/ttf", + ".ico": "image/x-icon", + ...MEDIA_MIME_TYPES, +}; + +function getMimeType(filePath: string): string { + const ext = 
path.extname(filePath).toLowerCase(); + return MIME_TYPES[ext] ?? "application/octet-stream"; +} + +export function registerWebviewProtocol(): void { + try { + const { protocol } = require("electron"); + + protocol.handle("vscode-webview-resource", async (request: Request) => { + const url = new URL(request.url); + let filePath = decodeURIComponent(url.pathname); + + if (process.platform === "darwin" && filePath.startsWith("//")) { + filePath = filePath.slice(1); + } + + if (!isPathAllowed(filePath)) { + return new Response("Forbidden", { status: 403 }); + } + + if (!fs.existsSync(filePath)) { + return new Response("Not found", { status: 404 }); + } + + const mimeType = getMimeType(filePath); + + return createFileProtocolResponse(request, filePath, { + contentType: mimeType, + cacheControl: "public, max-age=3600", + }); + }); + + shimLog("[vscode-shim] Registered vscode-webview-resource:// protocol"); + } catch (err) { + console.error("[vscode-shim] Failed to register protocol handler:", err); + } +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/terminal-shim.ts b/apps/desktop/src/main/lib/vscode-shim/api/terminal-shim.ts new file mode 100644 index 00000000000..7edaa4ef870 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/terminal-shim.ts @@ -0,0 +1,210 @@ +/** + * VS Code Terminal API shim backed by DaemonTerminalManager. 
+ */ + +import { randomUUID } from "node:crypto"; +import { shimLog } from "./debug-log"; +import { EventEmitter } from "./event-emitter"; +import { workspace } from "./workspace"; + +interface TerminalOptions { + name?: string; + cwd?: string; + env?: Record; + shellPath?: string; + shellArgs?: string[]; +} + +interface ShimTerminal { + readonly name: string; + readonly processId: Promise; + readonly exitStatus: { code: number | undefined } | undefined; + readonly shellIntegration?: ShellIntegration; + sendText(text: string, addNewLine?: boolean): void; + show(preserveFocus?: boolean): void; + hide(): void; + dispose(): void; +} + +interface ShellIntegration { + executeCommand(command: string): { + execution: { commandLine: string }; + read(): AsyncIterable; + }; +} + +const _onDidOpenTerminal = new EventEmitter(); +const _onDidCloseTerminal = new EventEmitter(); +const _onDidChangeActiveTerminal = new EventEmitter(); +const _onDidEndTerminalShellExecution = new EventEmitter(); +const _onDidChangeTerminalShellIntegration = new EventEmitter(); + +export const terminalEvents = { + onDidOpenTerminal: _onDidOpenTerminal.event, + onDidCloseTerminal: _onDidCloseTerminal.event, + onDidChangeActiveTerminal: _onDidChangeActiveTerminal.event, + onDidEndTerminalShellExecution: _onDidEndTerminalShellExecution.event, + onDidChangeTerminalShellIntegration: + _onDidChangeTerminalShellIntegration.event, +}; + +const activeTerminals: ShimTerminal[] = []; + +function getTerminalManager() { + try { + // Use dynamic import path that the bundler can resolve + // eslint-disable-next-line @typescript-eslint/no-var-requires + const mod = require("../../terminal") as { + // biome-ignore lint/suspicious/noExplicitAny: dynamic require for bundler compat + getDaemonTerminalManager: () => any; + }; + // biome-ignore lint/suspicious/noExplicitAny: dynamic require for bundler compat + return mod.getDaemonTerminalManager() as any; + } catch { + return null; + } +} + +export function 
createTerminal( + nameOrOptions?: string | TerminalOptions, +): ShimTerminal { + const opts: TerminalOptions = + typeof nameOrOptions === "string" + ? { name: nameOrOptions } + : (nameOrOptions ?? {}); + + const name = opts.name ?? "Extension Terminal"; + const paneId = `vscode-ext-terminal-${randomUUID()}`; + let exitStatus: { code: number | undefined } | undefined; + let pid: number | undefined; + + const manager = getTerminalManager(); + + // Create session asynchronously + const processIdPromise = (async () => { + if (!manager) return undefined; + try { + const result = await manager.createOrAttach({ + paneId, + tabId: `vscode-ext-tab-${paneId}`, + workspaceId: "vscode-extension-host", + cwd: opts.cwd ?? workspace.rootPath, + cols: 120, + rows: 30, + }); + pid = result?.snapshot?.pid; + // Listen for exit (store handler ref for cleanup in dispose) + const exitHandler = (exitCode: number) => { + exitStatus = { code: exitCode }; + _onDidCloseTerminal.fire(terminal); + const idx = activeTerminals.indexOf(terminal); + if (idx >= 0) activeTerminals.splice(idx, 1); + _onDidEndTerminalShellExecution.fire({ + terminal, + exitCode, + execution: { commandLine: { value: "" } }, + }); + // Self-cleanup + manager.off(`exit:${paneId}`, exitHandler); + }; + manager.on(`exit:${paneId}`, exitHandler); + return pid; + } catch (err) { + console.error(`[vscode-shim] Failed to create terminal "${name}":`, err); + return undefined; + } + })(); + + const terminal: ShimTerminal = { + name, + processId: processIdPromise, + get exitStatus() { + return exitStatus; + }, + get shellIntegration(): ShellIntegration | undefined { + if (!manager) return undefined; + return { + executeCommand(command: string) { + manager.write({ paneId, data: `${command}\n` }); + return { + execution: { commandLine: command }, + async *read() { + // Collect output using idle-timeout: yield when output + // stops for 200ms, or after max 30s total + const chunks: string[] = []; + const output: string = await new 
Promise((resolve) => { + let idleTimer: ReturnType; + const maxTimer = setTimeout(() => { + cleanup(); + resolve(chunks.join("")); + }, 30000); + + const handler = (data: string) => { + chunks.push(data); + clearTimeout(idleTimer); + idleTimer = setTimeout(() => { + cleanup(); + resolve(chunks.join("")); + }, 200); + }; + + const cleanup = () => { + clearTimeout(idleTimer); + clearTimeout(maxTimer); + manager.off(`data:${paneId}`, handler); + }; + + manager.on(`data:${paneId}`, handler); + // Initial timeout if no output at all + idleTimer = setTimeout(() => { + cleanup(); + resolve(chunks.join("")); + }, 2000); + }); + yield output; + }, + }; + }, + }; + }, + sendText(text: string, addNewLine = true) { + if (!manager) { + shimLog(`[vscode-shim] Terminal "${name}" sendText: ${text}`); + return; + } + manager.write({ + paneId, + data: addNewLine ? `${text}\n` : text, + }); + }, + show(_preserveFocus?: boolean) { + // Could focus the terminal in the UI + }, + hide() { + // noop + }, + dispose() { + if (manager) { + manager.off(`exit:${paneId}`, () => {}); + manager.kill(paneId).catch(() => {}); + } + const idx = activeTerminals.indexOf(terminal); + if (idx >= 0) activeTerminals.splice(idx, 1); + _onDidCloseTerminal.fire(terminal); + }, + }; + + activeTerminals.push(terminal); + _onDidOpenTerminal.fire(terminal); + _onDidChangeActiveTerminal.fire(terminal); + + return terminal; +} + +export function getTerminals(): ShimTerminal[] { + return [...activeTerminals]; +} + +export function getActiveTerminal(): ShimTerminal | undefined { + return activeTerminals[activeTerminals.length - 1]; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/uri.ts b/apps/desktop/src/main/lib/vscode-shim/api/uri.ts new file mode 100644 index 00000000000..9f08e05ac1d --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/uri.ts @@ -0,0 +1,120 @@ +/** + * VS Code Uri shim. 
+ */ + +import path from "node:path"; +import { URL } from "node:url"; + +export class Uri { + readonly scheme: string; + readonly authority: string; + readonly path: string; + readonly query: string; + readonly fragment: string; + + private constructor( + scheme: string, + authority: string, + uriPath: string, + query: string, + fragment: string, + ) { + this.scheme = scheme; + this.authority = authority; + this.path = uriPath; + this.query = query; + this.fragment = fragment; + } + + get fsPath(): string { + if (this.scheme === "file") { + return this.path.startsWith("/") ? this.path : `/${this.path}`; + } + return this.path; + } + + with(change: { + scheme?: string; + authority?: string; + path?: string; + query?: string; + fragment?: string; + }): Uri { + return new Uri( + change.scheme ?? this.scheme, + change.authority ?? this.authority, + change.path ?? this.path, + change.query ?? this.query, + change.fragment ?? this.fragment, + ); + } + + toString(): string { + if (this.scheme === "file") { + return `file://${this.path}`; + } + let result = `${this.scheme}://`; + if (this.authority) result += this.authority; + result += this.path; + if (this.query) result += `?${this.query}`; + if (this.fragment) result += `#${this.fragment}`; + return result; + } + + toJSON(): { + scheme: string; + authority: string; + path: string; + query: string; + fragment: string; + } { + return { + scheme: this.scheme, + authority: this.authority, + path: this.path, + query: this.query, + fragment: this.fragment, + }; + } + + static file(filePath: string): Uri { + const normalized = filePath.replace(/\\/g, "/"); + return new Uri("file", "", normalized, "", ""); + } + + static parse(value: string): Uri { + try { + const url = new URL(value); + return new Uri( + url.protocol.replace(":", ""), + url.hostname + (url.port ? 
`:${url.port}` : ""), + decodeURIComponent(url.pathname), + url.search.replace("?", ""), + url.hash.replace("#", ""), + ); + } catch { + return Uri.file(value); + } + } + + static from(components: { + scheme: string; + authority?: string; + path?: string; + query?: string; + fragment?: string; + }): Uri { + return new Uri( + components.scheme, + components.authority ?? "", + components.path ?? "", + components.query ?? "", + components.fragment ?? "", + ); + } + + static joinPath(base: Uri, ...pathSegments: string[]): Uri { + const joined = path.posix.join(base.path, ...pathSegments); + return base.with({ path: joined }); + } +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/webview-server.ts b/apps/desktop/src/main/lib/vscode-shim/api/webview-server.ts new file mode 100644 index 00000000000..8f4dc0b123b --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/webview-server.ts @@ -0,0 +1,371 @@ +/** + * Local HTTP server for serving VS Code extension webview content. + * + * Serves both webview HTML pages and extension resources (JS/CSS/images) + * on localhost with appropriate CSP headers. This bypasses all iframe + * CSP/protocol restrictions since HTTP is universally supported. 
+ */ + +import fs from "node:fs"; +import http from "node:http"; +import os from "node:os"; +import path from "node:path"; +import { MEDIA_MIME_TYPES, writeFileHttpResponse } from "../../file-streaming"; +import { shimLog, shimWarn } from "./debug-log"; + +const MIME_TYPES: Record = { + ".html": "text/html; charset=utf-8", + ".css": "text/css", + ".js": "application/javascript", + ".mjs": "application/javascript", + ".json": "application/json", + ".svg": "image/svg+xml", + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".woff": "font/woff", + ".woff2": "font/woff2", + ".ttf": "font/ttf", + ".ico": "image/x-icon", + ".wasm": "application/wasm", + ...MEDIA_MIME_TYPES, +}; + +const ALLOWED_ROOTS = [ + path.join(os.homedir(), ".vscode", "extensions"), + path.join(os.homedir(), ".vscode-insiders", "extensions"), +]; + +function isPathAllowed(filePath: string): boolean { + const resolved = path.resolve(filePath); + return ALLOWED_ROOTS.some( + (root) => resolved === root || resolved.startsWith(root + path.sep), + ); +} + +/** Store for webview HTML content, keyed by viewId */ +const htmlStore = new Map(); + +/** VS Code dark theme CSS variables - required for extension webviews to render */ +const VSCODE_THEME_CSS = ``; + +/** Custom theme CSS set from renderer (Superset theme → VS Code vars) */ +let customThemeCss: string | null = null; + +export function setCustomThemeCss(css: string | null): void { + customThemeCss = css; +} + +function getThemeCss(): string { + return customThemeCss ?? 
VSCODE_THEME_CSS; +} + +/** Bridge script injected into every webview page */ +const BRIDGE_SCRIPT_BODY = ``; + +function getBridgeScript(): string { + return `${getThemeCss()}${BRIDGE_SCRIPT_BODY}`; +} + +let server: http.Server | null = null; +let serverPort = 0; + +export function getWebviewServerPort(): number { + return serverPort; +} + +export function setWebviewHtml(viewId: string, html: string): void { + htmlStore.set(viewId, html); +} + +export function clearWebviewHtml(viewId: string): void { + htmlStore.delete(viewId); +} + +export function hasWebviewHtml(viewId: string): boolean { + return htmlStore.has(viewId); +} + +export function getWebviewUrl(viewId: string): string { + return `http://127.0.0.1:${serverPort}/webview/${encodeURIComponent(viewId)}`; +} + +/** + * Rewrite vscode-webview-resource:// URLs in HTML to use our HTTP server. + */ +function rewriteResourceUrls(html: string): string { + return html.replace( + /vscode-webview-resource:\/\/([^"'\s)]+)/g, + (_, resourcePath) => { + const decoded = decodeURIComponent(resourcePath); + return `http://127.0.0.1:${serverPort}/resource${decoded}`; + }, + ); +} + +/** + * Strip the extension's own CSP meta tag and nonce attributes. + * Our HTTP server provides its own CSP via response headers. + * Extensions set restrictive CSPs with nonces that block our bridge script. 
 * @param html  raw HTML produced by the extension for its webview
 * @returns     the HTML with CSP meta tags and nonce attributes removed
 *
 * NOTE(review): several string/regex literals in the span below look
 * truncated in the checked-in text — `includes("")` and `/(]*)>/` in
 * injectBridge were presumably `</head>` / `(<body[^>]*)>` markers, and
 * `/]*>/gi` in stripExtensionCsp was presumably a
 * `<meta http-equiv="Content-Security-Policy" ...>` matcher. Verify
 * against version-control history before relying on these code paths.
 * startWebviewServer below binds an ephemeral port on 127.0.0.1 and
 * serves two routes: /webview/{viewId} (stored HTML, CSP-stripped,
 * URL-rewritten, bridge-injected, served with our own CSP header) and
 * /resource/{filepath} (extension files gated by isPathAllowed).
 + */ +function stripExtensionCsp(html: string): string { + // Remove CSP meta tags + let result = html.replace( + /]*>/gi, + "", + ); + // Remove nonce attributes from script/style tags + result = result.replace(/\s+nonce=["'][^"']*["']/g, ""); + return result; +} + +function injectBridge(html: string): string { + // Inject bridge script + theme CSS into head + let result = html; + if (result.includes("")) { + result = result.replace("", `${getBridgeScript()}`); + } else { + result = `${getBridgeScript()}${result}`; + } + // Add vscode-dark class to body for theme detection + if (result.includes("]*)>/, ''); + } + return result; +} + +export async function startWebviewServer(): Promise { + if (server) return serverPort; + + return new Promise((resolve, reject) => { + server = http.createServer(async (req, res) => { + const url = new URL(req.url ?? "/", `http://127.0.0.1`); + shimLog(`[webview-server] ${req.method} ${url.pathname}`); + + // Serve webview HTML pages: /webview/{viewId} + if (url.pathname.startsWith("/webview/")) { + const viewId = decodeURIComponent( + url.pathname.slice("/webview/".length), + ); + shimLog( + `[webview-server] Serving webview: viewId="${viewId}", htmlStore has ${htmlStore.size} entries: [${[...htmlStore.keys()].join(", ")}]`, + ); + let html = htmlStore.get(viewId); + + if (!html) { + shimWarn(`[webview-server] HTML not found for viewId: ${viewId}`); + res.writeHead(404, { "Content-Type": "text/html; charset=utf-8" }); + res.end( + `

Webview content not available

`, + ); + return; + } + + shimLog(`[webview-server] Raw HTML length: ${html.length}`); + shimLog( + `[webview-server] HTML preview (first 300): ${html.substring(0, 300)}`, + ); + + // Strip extension's CSP (we provide our own via headers), rewrite URLs, inject bridge + const beforeCsp = html.length; + html = stripExtensionCsp(html); + shimLog( + `[webview-server] After CSP strip: ${beforeCsp} -> ${html.length} (removed ${beforeCsp - html.length} chars)`, + ); + + html = rewriteResourceUrls(html); + shimLog(`[webview-server] After URL rewrite: ${html.length} chars`); + + html = injectBridge(html); + shimLog(`[webview-server] After bridge inject: ${html.length} chars`); + shimLog( + `[webview-server] Final HTML preview (first 500): ${html.substring(0, 500)}`, + ); + + res.writeHead(200, { + "Content-Type": "text/html; charset=utf-8", + "Content-Security-Policy": [ + "default-src 'none'", + `script-src 'unsafe-inline' 'unsafe-eval' http://127.0.0.1:${serverPort} https:`, + `style-src 'unsafe-inline' http://127.0.0.1:${serverPort} https:`, + `img-src http://127.0.0.1:${serverPort} https: data: blob:`, + `font-src http://127.0.0.1:${serverPort} https: data:`, + `media-src vscode-webview-resource: http://127.0.0.1:${serverPort} https: data: blob:`, + "connect-src https: wss: ws: http://127.0.0.1:* http://localhost:*", + `frame-src http://127.0.0.1:${serverPort} https:`, + "worker-src blob:", + ].join("; "), + }); + res.end(html); + return; + } + + // Serve extension resources: /resource/{filepath} + if (url.pathname.startsWith("/resource/")) { + let filePath = decodeURIComponent( + url.pathname.slice("/resource".length), + ); + + // Normalize path + if (process.platform === "darwin" && filePath.startsWith("//")) { + filePath = filePath.slice(1); + } + + shimLog( + `[webview-server] Resource request: ${filePath}, allowed: ${isPathAllowed(filePath)}, exists: ${fs.existsSync(filePath)}`, + ); + + if (!isPathAllowed(filePath)) { + res.writeHead(403, { "Content-Type": 
"text/plain" }); + res.end("Forbidden"); + return; + } + + if (!fs.existsSync(filePath)) { + res.writeHead(404, { "Content-Type": "text/plain" }); + res.end("Not found"); + return; + } + + const ext = path.extname(filePath).toLowerCase(); + const mimeType = MIME_TYPES[ext] ?? "application/octet-stream"; + + await writeFileHttpResponse(req, res, filePath, { + contentType: mimeType, + cacheControl: "public, max-age=3600", + }); + return; + } + + res.writeHead(404); + res.end("Not found"); + }); + + server.listen(0, "127.0.0.1", () => { + const addr = server?.address(); + if (addr && typeof addr === "object") { + serverPort = addr.port; + shimLog( + `[vscode-shim] Webview server listening on http://127.0.0.1:${serverPort}`, + ); + resolve(serverPort); + } else { + reject(new Error("Failed to get server address")); + } + }); + + server.on("error", reject); + }); +} + +export function stopWebviewServer(): void { + server?.close(); + server = null; + serverPort = 0; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/webview.ts b/apps/desktop/src/main/lib/vscode-shim/api/webview.ts new file mode 100644 index 00000000000..e9137b5b7c7 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/webview.ts @@ -0,0 +1,327 @@ +/** + * VS Code Webview API shim.
 *
 * Declares the shim's Webview/WebviewView/WebviewPanel interfaces, keeps
 * registries of view providers, panel serializers, and live views/panels,
 * and relays html/message/dispose/panel-created events through the module
 * `_onWebviewEvent` emitter so a tRPC subscription can forward them to the
 * renderer iframe. Webview objects are wrapped in a Proxy whose `set` trap
 * intercepts `html` assignment to fire an "html" event.
 *
 * NOTE(review): many generic type arguments in this span appear stripped
 * from the checked-in text (`new EventEmitter()`, `new Map()`, bare
 * `Promise` / `Event` in the interfaces) — restore the original parameters
 * from version-control history rather than inferring them here.
 + */ + +import { shimLog, shimWarn } from "./debug-log"; +import { Disposable, type Event, EventEmitter } from "./event-emitter"; +import { Uri } from "./uri"; + +export interface WebviewOptions { + enableScripts?: boolean; + enableCommandUris?: boolean; + localResourceRoots?: Uri[]; + portMapping?: Array<{ webviewPort: number; extensionHostPort: number }>; +} + +export interface Webview { + options: WebviewOptions; + html: string; + readonly onDidReceiveMessage: Event; + postMessage(message: unknown): Promise; + asWebviewUri(localResource: Uri): Uri; + readonly cspSource: string; +} + +export interface WebviewView { + readonly viewType: string; + readonly webview: Webview; + title?: string; + description?: string; + badge?: { tooltip: string; value: number }; + readonly visible: boolean; + readonly onDidDispose: Event; + readonly onDidChangeVisibility: Event; + show(preserveFocus?: boolean): void; + dispose(): void; +} + +export interface WebviewPanel { + readonly viewType: string; + title: string; + readonly webview: Webview; + readonly active: boolean; + readonly visible: boolean; + readonly viewColumn: number | undefined; + readonly onDidDispose: Event; + readonly onDidChangeViewState: Event<{ webviewPanel: WebviewPanel }>; + iconPath?: Uri | { light: Uri; dark: Uri }; + reveal(viewColumn?: number, preserveFocus?: boolean): void; + dispose(): void; +} + +export interface WebviewViewProvider { + resolveWebviewView( + webviewView: WebviewView, + context: { state?: unknown }, + token: { + isCancellationRequested: boolean; + onCancellationRequested: Event; + }, + ): void | Promise; +} + +export interface WebviewPanelSerializer { + deserializeWebviewPanel( + webviewPanel: WebviewPanel, + state: unknown, + ): Promise; +} + +// Emits when webview html/messages change — consumed by tRPC router +export interface WebviewEvent { + viewId: string; + type: "html" | "message" | "title" | "dispose" | "panel-created"; + data: unknown; +} + +const _onWebviewEvent = new 
EventEmitter(); +export const onWebviewEvent = _onWebviewEvent.event; + +const viewProviders = new Map(); +const panelSerializers = new Map(); +const activeViews = new Map(); +const activePanels = new Map(); + +export function getViewProvider( + viewType: string, +): WebviewViewProvider | undefined { + return viewProviders.get(viewType); +} + +export function getActiveView(viewId: string): WebviewView | undefined { + return activeViews.get(viewId); +} + +export function getActivePanel(panelId: string): WebviewPanel | undefined { + return activePanels.get(panelId); +} + +export function registerWebviewViewProvider( + viewType: string, + provider: WebviewViewProvider, + _options?: { webviewOptions?: { retainContextWhenHidden?: boolean } }, +): Disposable { + shimLog(`[vscode-shim] registerWebviewViewProvider: ${viewType}`); + viewProviders.set(viewType, provider); + return new Disposable(() => { + viewProviders.delete(viewType); + }); +} + +export function registerWebviewPanelSerializer( + viewType: string, + serializer: WebviewPanelSerializer, +): Disposable { + panelSerializers.set(viewType, serializer); + return new Disposable(() => { + panelSerializers.delete(viewType); + }); +} + +export interface WebviewInternal extends Webview { + _onDidReceiveMessage: EventEmitter; + _onDidPostMessage: EventEmitter; +} + +function createWebview( + _extensionPath: string, + options?: WebviewOptions, +): WebviewInternal { + const _onDidReceiveMessage = new EventEmitter(); + const _onDidPostMessage = new EventEmitter(); + let _html = ""; + + return { + options: options ?? 
{}, + get html() { + return _html; + }, + set html(value: string) { + _html = value; + }, + onDidReceiveMessage: _onDidReceiveMessage.event, + _onDidReceiveMessage, + _onDidPostMessage, + async postMessage(message: unknown): Promise { + _onDidPostMessage.fire(message); + return true; + }, + asWebviewUri(localResource: Uri): Uri { + return Uri.from({ + scheme: "vscode-webview-resource", + path: localResource.path, + }); + }, + cspSource: "vscode-webview-resource:", + }; +} + +/** Called from renderer when a sidebar view becomes visible */ +export function resolveWebviewView( + viewType: string, + extensionPath: string, +): { view: WebviewView; viewId: string } | undefined { + shimLog( + `[vscode-shim] resolveWebviewView: ${viewType}, registered providers: [${[...viewProviders.keys()].join(", ")}]`, + ); + const provider = viewProviders.get(viewType); + if (!provider) { + shimWarn(`[vscode-shim] No provider found for viewType: ${viewType}`); + return undefined; + } + + const _onDidDispose = new EventEmitter(); + const _onDidChangeVisibility = new EventEmitter(); + const webview = createWebview(extensionPath, { enableScripts: true }); + const viewId = `view:${viewType}:${Date.now()}`; + + // Relay extension→webview postMessage as events (so tRPC subscription can forward to iframe) + webview._onDidPostMessage.event((message) => { + shimLog( + `[webview:${viewId}] postMessage from extension to webview, type=${typeof message === "object" && message !== null && "type" in message ? (message as { type: string }).type : "unknown"}`, + ); + _onWebviewEvent.fire({ viewId, type: "message", data: message }); + }); + + // Intercept html setter to emit events + const rawWebview = webview; + const proxiedWebview = new Proxy(rawWebview, { + set(target, prop, value) { + if (prop === "html") { + const htmlStr = typeof value === "string" ? 
value : String(value); + shimLog( + `[webview:${viewId}] HTML set, length=${htmlStr.length}, preview="${htmlStr.substring(0, 100)}..."`, + ); + (target as { html: string }).html = value; + _onWebviewEvent.fire({ viewId, type: "html", data: value }); + return true; + } + shimLog(`[webview:${viewId}] Property set: ${String(prop)}`); + (target as unknown as Record)[prop] = value; + return true; + }, + }); + + const view: WebviewView = { + viewType, + webview: proxiedWebview, + title: undefined, + description: undefined, + badge: undefined, + visible: true, + onDidDispose: _onDidDispose.event, + onDidChangeVisibility: _onDidChangeVisibility.event, + show(_preserveFocus?: boolean) { + // noop for now + }, + dispose() { + _onDidDispose.fire(); + _onWebviewEvent.fire({ viewId, type: "dispose", data: null }); + activeViews.delete(viewId); + }, + }; + + activeViews.set(viewId, view); + + const cancellationToken = { + isCancellationRequested: false, + onCancellationRequested: new EventEmitter().event, + }; + + shimLog(`[webview:${viewId}] Calling provider.resolveWebviewView...`); + try { + const result = provider.resolveWebviewView( + view, + { state: undefined }, + cancellationToken, + ); + if (result && typeof (result as Promise).then === "function") { + (result as Promise) + .then(() => { + shimLog( + `[webview:${viewId}] Provider resolved (async). HTML set: ${!!rawWebview.html}, len=${rawWebview.html?.length ?? 0}`, + ); + }) + .catch((err: unknown) => { + console.error(`[webview:${viewId}] Provider rejected:`, err); + }); + } else { + shimLog( + `[webview:${viewId}] Provider resolved (sync). HTML set: ${!!rawWebview.html}, len=${rawWebview.html?.length ?? 
0}`, + ); + } + } catch (err) { + console.error(`[webview:${viewId}] Provider threw:`, err); + } + + return { view, viewId }; +} + +export function createWebviewPanel( + viewType: string, + title: string, + showOptions: number | { viewColumn: number; preserveFocus?: boolean }, + extensionPath: string, + options?: WebviewOptions, +): WebviewPanel { + const _onDidDispose = new EventEmitter(); + const _onDidChangeViewState = new EventEmitter<{ + webviewPanel: WebviewPanel; + }>(); + const webview = createWebview(extensionPath, options); + const panelId = `panel:${viewType}:${Date.now()}`; + const viewColumn = + typeof showOptions === "number" ? showOptions : showOptions.viewColumn; + + // Relay extension→panel postMessage as events + webview._onDidPostMessage.event((message) => { + _onWebviewEvent.fire({ viewId: panelId, type: "message", data: message }); + }); + + const proxiedWebview = new Proxy(webview, { + set(target, prop, value) { + if (prop === "html") { + const htmlStr = typeof value === "string" ? 
value : String(value); + shimLog( + `[webview:${panelId}] Panel HTML set, length=${htmlStr.length}`, + ); + (target as { html: string }).html = value; + _onWebviewEvent.fire({ viewId: panelId, type: "html", data: value }); + return true; + } + (target as unknown as Record)[prop] = value; + return true; + }, + }); + + const panel: WebviewPanel = { + viewType, + title, + webview: proxiedWebview, + active: true, + visible: true, + viewColumn, + onDidDispose: _onDidDispose.event, + onDidChangeViewState: _onDidChangeViewState.event, + iconPath: undefined, + reveal(_viewColumn?: number, _preserveFocus?: boolean) { + // noop for now + }, + dispose() { + _onDidDispose.fire(); + _onWebviewEvent.fire({ viewId: panelId, type: "dispose", data: null }); + activePanels.delete(panelId); + }, + }; + + activePanels.set(panelId, panel); + + // Notify renderer to create a UI tab for this panel + _onWebviewEvent.fire({ + viewId: panelId, + type: "panel-created" as WebviewEvent["type"], + data: { viewType, title, panelId, extensionPath }, + }); + + return panel; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/api/window.ts b/apps/desktop/src/main/lib/vscode-shim/api/window.ts new file mode 100644 index 00000000000..1cd3c31ef0c --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/api/window.ts @@ -0,0 +1,573 @@ +/** + * VS Code window API shim.
 *
 * Provides the shim's `window.*` surface: active/visible text editors,
 * terminals (delegated to ./terminal-shim), message/quick-pick/open-file
 * dialogs bridged to the main process over the injected `_sendToMain` IPC
 * channel, `withProgress`, output channels, URI handlers for deep links,
 * and webview registration delegated to ./webview.
 *
 * NOTE(review): dialog requests are correlated through `_pendingDialogs`,
 * keyed by `crypto.randomUUID()`, and entries are removed only in
 * resolveDialogResult / resolveOpenDialogResult — a request the main
 * process never answers leaks its pending entry and leaves the awaiting
 * promise unresolved; confirm main always replies (or add a timeout).
 *
 * NOTE(review): as in the sibling files, generic type arguments on
 * `EventEmitter()` / `Promise` appear stripped from the checked-in text.
 + */ + +import type { WorkerToMainMessage } from "../ipc-types"; +import { shimLog, shimWarn } from "./debug-log"; +import { Disposable, type Event, EventEmitter } from "./event-emitter"; +import { createOutputChannel, type OutputChannel } from "./output-channel"; +import { + createTerminal as createTerminalImpl, + getActiveTerminal, + getTerminals, + terminalEvents, +} from "./terminal-shim"; +import { Uri } from "./uri"; +import { + createWebviewPanel, + registerWebviewPanelSerializer, + registerWebviewViewProvider, + type WebviewOptions, + type WebviewPanel, +} from "./webview"; +import { setActiveWorkspaceTextDocument } from "./workspace"; + +const QUICK_PICK_ITEM_KIND_SEPARATOR = -1; + +interface TextEditor { + readonly document: { + uri: Uri; + fileName: string; + getText(range?: unknown): string; + languageId: string; + isDirty?: boolean; + isUntitled?: boolean; + }; + readonly selection: { + readonly start: { line: number; character: number }; + readonly end: { line: number; character: number }; + readonly isEmpty: boolean; + readonly active: { line: number; character: number }; + }; + readonly selections: Array; + readonly viewColumn: number | undefined; + edit( + callback: (builder: { + insert( + position: { line: number; character: number }, + value: string, + ): void; + }) => void, + ): Promise; +} + +interface Terminal { + readonly name: string; + readonly processId: Promise; + sendText(text: string, addNewLine?: boolean): void; + show(preserveFocus?: boolean): void; + hide(): void; + dispose(): void; + readonly exitStatus: { code: number | undefined } | undefined; + readonly shellIntegration?: { + executeCommand(command: string): { + execution: { commandLine: string }; + read(): AsyncIterable; + }; + }; +} + +// Minimal stubs for activeTextEditor and visible editors +const _onDidChangeActiveTextEditor = new EventEmitter(); +const _onDidChangeVisibleTextEditors = new EventEmitter(); +const _onDidChangeTextEditorSelection = new EventEmitter(); + +// 
Emits when showTextDocument is called - renderer listens to open file viewer +const _openFileEmitter = new EventEmitter<{ + filePath: string; + line?: number; +}>(); +export const onOpenFile = _openFileEmitter.event; +export function fireOpenFile(filePath: string, line?: number): void { + _openFileEmitter.fire({ filePath, line }); +} + +// Emits when vscode.diff is called - renderer listens to open diff viewer +const _openDiffEmitter = new EventEmitter<{ + leftUri: string; + rightUri: string; + title?: string; + leftContent?: string; +}>(); +export const onOpenDiff = _openDiffEmitter.event; +export function fireOpenDiff( + leftUri: string, + rightUri: string, + title?: string, + leftContent?: string, +): void { + _openDiffEmitter.fire({ leftUri, rightUri, title, leftContent }); +} + +// IPC send function — injected from worker process so dialog calls go via main +let _sendToMain: ((msg: WorkerToMainMessage) => void) | null = null; +export function setSendToMain(fn: (msg: WorkerToMainMessage) => void): void { + _sendToMain = fn; +} + +// Pending dialog requests waiting for main-process response +const _pendingDialogs = new Map void>(); +export function resolveDialogResult( + requestId: string, + selectedIndex: number, +): void { + _pendingDialogs.get(requestId)?.(selectedIndex); + _pendingDialogs.delete(requestId); +} +export function resolveOpenDialogResult( + requestId: string, + filePaths: string[] | null, +): void { + _pendingDialogs.get(requestId)?.(filePaths); + _pendingDialogs.delete(requestId); +} + +async function showMessageViaIpc( + method: "showInformationMessage" | "showWarningMessage" | "showErrorMessage", + message: string, + items: string[], +): Promise { + if (!_sendToMain) return undefined; + const requestId = crypto.randomUUID(); + const selectedIndex = await new Promise((resolve) => { + _pendingDialogs.set(requestId, (v) => resolve(v as number)); + _sendToMain?.({ type: "show-dialog", requestId, method, message, items }); + }); + if 
(selectedIndex < 0) return undefined; + return items[selectedIndex]; +} + +// Active text editor state — updated from renderer via tRPC +let _activeTextEditor: TextEditor | undefined; +const _visibleTextEditors: TextEditor[] = []; + +/** Called from tRPC when the focused file-viewer pane changes */ +export function setActiveTextEditor( + filePath: string | null, + languageId?: string, +): void { + const previous = _activeTextEditor; + + if (!filePath) { + _activeTextEditor = undefined; + setActiveWorkspaceTextDocument(null); + } else { + const uri = Uri.file(filePath); + _activeTextEditor = { + document: { + uri, + fileName: filePath, + getText() { + try { + return require("node:fs").readFileSync(filePath, "utf-8"); + } catch { + return ""; + } + }, + languageId: languageId ?? "plaintext", + isDirty: false, + isUntitled: false, + }, + selection: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + isEmpty: true, + active: { line: 0, character: 0 }, + }, + selections: [], + viewColumn: 1, + async edit(callback) { + const inserts: Array<{ + position: { line: number; character: number }; + value: string; + }> = []; + callback({ + insert(position, value) { + inserts.push({ position, value }); + }, + }); + if (inserts.length === 0) { + return true; + } + + try { + const fs = require("node:fs") as typeof import("node:fs"); + const content = fs.readFileSync(filePath, "utf-8"); + const lines = content.split("\n"); + const sortedInserts = [...inserts].sort((left, right) => { + const lineDelta = right.position.line - left.position.line; + return lineDelta !== 0 + ? lineDelta + : right.position.character - left.position.character; + }); + + for (const insert of sortedInserts) { + const line = lines[insert.position.line] ?? 
""; + lines[insert.position.line] = + line.slice(0, insert.position.character) + + insert.value + + line.slice(insert.position.character); + } + + fs.writeFileSync(filePath, lines.join("\n"), "utf-8"); + return true; + } catch (error) { + shimWarn("[vscode-shim] TextEditor.edit failed:", error); + return false; + } + }, + }; + setActiveWorkspaceTextDocument(filePath, languageId); + // Update visible editors + _visibleTextEditors.length = 0; + _visibleTextEditors.push(_activeTextEditor); + } + + if (previous !== _activeTextEditor) { + _onDidChangeActiveTextEditor.fire(_activeTextEditor); + _onDidChangeVisibleTextEditors.fire([..._visibleTextEditors]); + if (_activeTextEditor) { + _onDidChangeTextEditorSelection.fire({ + textEditor: _activeTextEditor, + selections: [_activeTextEditor.selection], + kind: 1, + }); + } + } +} + +// URI handlers for deep-link activation (e.g., ChatGPT OAuth) +const uriHandlers: Array<{ handleUri(uri: Uri): void }> = []; + +/** Called from Electron's open-url handler to dispatch URIs to extensions */ +export function handleUri(uri: Uri): void { + for (const handler of uriHandlers) { + try { + handler.handleUri(uri); + } catch (err) { + console.error("[vscode-shim] URI handler error:", err); + } + } +} + +// Terminal events are delegated to terminal-shim.ts + +export const window = { + // Text editor + get activeTextEditor(): TextEditor | undefined { + return _activeTextEditor; + }, + + get visibleTextEditors(): TextEditor[] { + return [..._visibleTextEditors]; + }, + + get activeTerminal(): Terminal | undefined { + return getActiveTerminal() as Terminal | undefined; + }, + + get terminals(): Terminal[] { + return getTerminals() as Terminal[]; + }, + + onDidChangeActiveTextEditor: _onDidChangeActiveTextEditor.event, + onDidChangeVisibleTextEditors: _onDidChangeVisibleTextEditors.event, + onDidChangeTextEditorSelection: _onDidChangeTextEditorSelection.event, + onDidOpenTerminal: terminalEvents.onDidOpenTerminal, + onDidCloseTerminal: 
terminalEvents.onDidCloseTerminal, + onDidChangeActiveTerminal: terminalEvents.onDidChangeActiveTerminal, + onDidEndTerminalShellExecution: terminalEvents.onDidEndTerminalShellExecution, + onDidChangeTerminalShellIntegration: + terminalEvents.onDidChangeTerminalShellIntegration, + onDidChangeWindowState: new EventEmitter<{ focused: boolean }>().event, + state: { focused: true, active: true }, + + // Tab groups + tabGroups: { + all: [] as Array<{ + tabs: unknown[]; + isActive: boolean; + viewColumn: number; + }>, + get activeTabGroup() { + return { tabs: [], isActive: true, viewColumn: 1 }; + }, + onDidChangeTabGroups: new EventEmitter().event, + onDidChangeTabs: new EventEmitter().event, + close(_tab: unknown): Promise { + return Promise.resolve(true); + }, + }, + + // Messages — sent via IPC to main process (Worker cannot access Electron dialog directly) + async showInformationMessage( + message: string, + ...items: string[] + ): Promise { + if (items.length === 0) { + shimLog(`[vscode-shim] INFO: ${message}`); + return undefined; + } + return showMessageViaIpc("showInformationMessage", message, items); + }, + + async showWarningMessage( + message: string, + ...items: string[] + ): Promise { + if (items.length === 0) { + shimWarn(`[vscode-shim] WARN: ${message}`); + return undefined; + } + return showMessageViaIpc("showWarningMessage", message, items); + }, + + async showErrorMessage( + message: string, + ...items: string[] + ): Promise { + if (items.length === 0) { + console.error(`[vscode-shim] ERROR: ${message}`); + return undefined; + } + return showMessageViaIpc("showErrorMessage", message, items); + }, + + async showQuickPick( + items: + | string[] + | Array<{ + label: string; + description?: string; + detail?: string; + kind?: number; + }> + | Promise< + | string[] + | Array<{ + label: string; + description?: string; + detail?: string; + kind?: number; + }> + >, + options?: { placeHolder?: string; canPickMany?: boolean }, + ): Promise { + const resolved = 
await items; + if (!resolved || resolved.length === 0) return undefined; + const selectableItems = resolved.filter((item) => { + return ( + typeof item === "string" || item.kind !== QUICK_PICK_ITEM_KIND_SEPARATOR + ); + }); + if (selectableItems.length === 0) return undefined; + const labels = selectableItems.map((item) => + typeof item === "string" ? item : item.label, + ); + if (!_sendToMain) { + shimWarn("[vscode-shim] showQuickPick: no IPC channel available"); + return undefined; + } + const requestId = crypto.randomUUID(); + const selectedIndex = await new Promise((resolve) => { + _pendingDialogs.set(requestId, (v) => resolve(v as number)); + _sendToMain?.({ + type: "show-quickpick", + requestId, + labels, + placeHolder: options?.placeHolder, + }); + }); + if (selectedIndex < 0) return undefined; + return selectableItems[selectedIndex]; + }, + + async showInputBox(_options?: { + prompt?: string; + value?: string; + placeHolder?: string; + }): Promise { + shimWarn("[vscode-shim] showInputBox stub"); + return undefined; + }, + + async showOpenDialog(options?: { + canSelectFiles?: boolean; + canSelectFolders?: boolean; + canSelectMany?: boolean; + title?: string; + filters?: Record; + defaultUri?: Uri; + }): Promise { + if (!_sendToMain) { + shimWarn("[vscode-shim] showOpenDialog: no IPC channel available"); + return undefined; + } + const filters = options?.filters + ? 
Object.entries(options.filters).map(([name, extensions]) => ({ + name, + extensions, + })) + : undefined; + const requestId = crypto.randomUUID(); + const filePaths = await new Promise((resolve) => { + _pendingDialogs.set(requestId, (v) => resolve(v as string[] | null)); + _sendToMain?.({ + type: "show-open-dialog", + requestId, + canSelectFiles: options?.canSelectFiles, + canSelectFolders: options?.canSelectFolders, + canSelectMany: options?.canSelectMany, + title: options?.title, + filters, + defaultPath: options?.defaultUri?.fsPath, + }); + }); + if (!filePaths || filePaths.length === 0) return undefined; + return filePaths.map((p) => Uri.file(p)); + }, + + async showTextDocument( + document: { uri: Uri } | Uri, + _options?: unknown, + ): Promise { + const uri = + "uri" in (document as object) + ? (document as { uri: Uri }).uri + : (document as Uri); + shimLog(`[vscode-shim] showTextDocument: ${uri.toString()}`); + + // Notify renderer to open the file in file viewer + if (uri.scheme === "file" && uri.fsPath) { + fireOpenFile( + uri.fsPath, + (_options as { selection?: { start?: { line?: number } } })?.selection + ?.start?.line, + ); + } + + // Return a minimal editor stub + return { + document: { + uri, + fileName: uri.fsPath, + getText() { + return ""; + }, + languageId: "plaintext", + isDirty: false, + isUntitled: false, + }, + selection: { + start: { line: 0, character: 0 }, + end: { line: 0, character: 0 }, + isEmpty: true, + active: { line: 0, character: 0 }, + }, + selections: [], + viewColumn: 1, + async edit(_callback) { + return false; + }, + }; + }, + + withProgress( + _options: { location: number; title?: string; cancellable?: boolean }, + task: ( + progress: { + report(value: { message?: string; increment?: number }): void; + }, + token: { + isCancellationRequested: boolean; + onCancellationRequested: Event; + }, + ) => Promise, + ): Promise { + const progress = { + report(_value: { message?: string; increment?: number }) { + // noop for now + }, + 
}; + const token = { + isCancellationRequested: false, + onCancellationRequested: new EventEmitter().event, + }; + return task(progress, token); + }, + + createOutputChannel( + name: string, + options?: { log: true } | string, + ): OutputChannel { + return createOutputChannel(name, options); + }, + + createTerminal( + nameOrOptions?: + | string + | { + name?: string; + cwd?: string; + env?: Record; + shellPath?: string; + shellArgs?: string[]; + }, + ): Terminal { + return createTerminalImpl(nameOrOptions) as Terminal; + }, + + registerUriHandler(handler: { handleUri(uri: Uri): void }): Disposable { + uriHandlers.push(handler); + return new Disposable(() => { + const idx = uriHandlers.indexOf(handler); + if (idx >= 0) uriHandlers.splice(idx, 1); + }); + }, + + registerCustomEditorProvider( + viewType: string, + _provider: unknown, + _options?: { + webviewOptions?: { retainContextWhenHidden?: boolean }; + supportsMultipleEditorsPerDocument?: boolean; + }, + ): Disposable { + shimLog(`[vscode-shim] registerCustomEditorProvider: ${viewType}`); + return new Disposable(() => {}); + }, + + createStatusBarItem( + _alignmentOrId?: unknown, + _priority?: number, + ): { + text: string; + tooltip: string; + command: string | undefined; + show(): void; + hide(): void; + dispose(): void; + } { + return { + text: "", + tooltip: "", + command: undefined, + show() {}, + hide() {}, + dispose() {}, + }; + }, + + // Webview delegation + registerWebviewViewProvider, + registerWebviewPanelSerializer, + + createWebviewPanel( + viewType: string, + title: string, + showOptions: number | { viewColumn: number; preserveFocus?: boolean }, + options?: WebviewOptions, + ): WebviewPanel { + return createWebviewPanel(viewType, title, showOptions, "", options); + }, +}; diff --git a/apps/desktop/src/main/lib/vscode-shim/api/workspace.ts b/apps/desktop/src/main/lib/vscode-shim/api/workspace.ts new file mode 100644 index 00000000000..c93e283e7b2 --- /dev/null +++ 
b/apps/desktop/src/main/lib/vscode-shim/api/workspace.ts @@ -0,0 +1,659 @@ +/** + * VS Code workspace API shim. + */ + +import fs from "node:fs"; +import path from "node:path"; +import { getConfiguration, onDidChangeConfiguration } from "./configuration"; +import { shimLog, shimWarn } from "./debug-log"; +import { Disposable, type Event, EventEmitter } from "./event-emitter"; +import { + compileGlobMatchers, + compileGlobPatterns, + directoryMayContainMatches, + globToRegExp, + matchesAnyGlob, + normalizeGlobPath, +} from "./glob-utils"; +import { Uri } from "./uri"; + +interface WorkspaceFolder { + readonly uri: Uri; + readonly name: string; + readonly index: number; +} + +interface TextDocument { + readonly uri: Uri; + readonly fileName: string; + readonly languageId: string; + readonly version: number; + readonly lineCount: number; + readonly isDirty: boolean; + readonly isUntitled: boolean; + getText(range?: unknown): string; + save(): Promise; +} + +interface TextEditRange { + start: { line: number; character: number }; + end: { line: number; character: number }; +} + +interface WorkspaceEdit { + entries(): Array<[Uri, Array<{ range: TextEditRange; newText: string }>]>; +} + +interface FileSystemWatcher { + readonly onDidCreate: Event; + readonly onDidChange: Event; + readonly onDidDelete: Event; + dispose(): void; +} + +// Current workspace path — set via setWorkspacePath() +let workspaceFolderPath: string | undefined; +const _onDidChangeWorkspaceFolders = new EventEmitter<{ + added: Array<{ uri: Uri; name: string; index: number }>; + removed: Array<{ uri: Uri; name: string; index: number }>; +}>(); +const _onDidChangeTextDocument = new EventEmitter(); +const _onDidOpenTextDocument = new EventEmitter(); +const _onDidCloseTextDocument = new EventEmitter(); +const _onWillSaveTextDocument = new EventEmitter(); +const _textDocuments: TextDocument[] = []; + +const fileSystemProviders = new Map(); +const textDocumentContentProviders = new Map(); +const 
DEFAULT_FIND_EXCLUDE_GLOBS = ["**/.git", "**/node_modules"]; +const FILE_TYPE = { + File: 1, + Directory: 2, + SymbolicLink: 64, +} as const; + +export async function resolveTextDocumentContent( + uri: Uri, +): Promise { + if (uri.scheme === "file") { + try { + return await fs.promises.readFile(uri.fsPath, "utf-8"); + } catch { + return undefined; + } + } + + const provider = textDocumentContentProviders.get(uri.scheme) as + | { + provideTextDocumentContent?( + uri: Uri, + ): string | undefined | Promise; + } + | undefined; + if (!provider?.provideTextDocumentContent) { + return undefined; + } + + const content = await provider.provideTextDocumentContent(uri); + return typeof content === "string" ? content : undefined; +} + +export function setWorkspacePath(folderPath: string): void { + const oldPath = workspaceFolderPath; + workspaceFolderPath = folderPath; + + if (oldPath !== folderPath) { + _onDidChangeWorkspaceFolders.fire({ + added: folderPath + ? [ + { + uri: Uri.file(folderPath), + name: path.basename(folderPath), + index: 0, + }, + ] + : [], + removed: oldPath + ? [{ uri: Uri.file(oldPath), name: path.basename(oldPath), index: 0 }] + : [], + }); + } +} + +export function setActiveWorkspaceTextDocument( + filePath: string | null, + languageId?: string, +): void { + _textDocuments.length = 0; + if (!filePath) { + return; + } + + const readContent = () => { + try { + return fs.readFileSync(filePath, "utf-8"); + } catch { + return ""; + } + }; + + const content = readContent(); + const doc: TextDocument = { + uri: Uri.file(filePath), + fileName: filePath, + languageId: languageId ?? 
(path.extname(filePath).slice(1) || "plaintext"), + version: 1, + lineCount: content.split("\n").length, + isDirty: false, + isUntitled: false, + getText() { + return readContent(); + }, + async save() { + return true; + }, + }; + _textDocuments.push(doc); +} + +export const workspace = { + get workspaceFolders(): WorkspaceFolder[] | undefined { + if (!workspaceFolderPath) return undefined; + return [ + { + uri: Uri.file(workspaceFolderPath), + name: path.basename(workspaceFolderPath), + index: 0, + }, + ]; + }, + + get rootPath(): string | undefined { + return workspaceFolderPath; + }, + + get workspaceFile(): Uri | undefined { + return undefined; + }, + + get textDocuments(): TextDocument[] { + return [..._textDocuments]; + }, + + get name(): string | undefined { + return workspaceFolderPath ? path.basename(workspaceFolderPath) : undefined; + }, + + onDidChangeWorkspaceFolders: _onDidChangeWorkspaceFolders.event, + onDidChangeTextDocument: _onDidChangeTextDocument.event, + onDidOpenTextDocument: _onDidOpenTextDocument.event, + onDidCloseTextDocument: _onDidCloseTextDocument.event, + onWillSaveTextDocument: _onWillSaveTextDocument.event, + onDidChangeConfiguration, + + getConfiguration, + + getWorkspaceFolder(uri: Uri): WorkspaceFolder | undefined { + if (!workspaceFolderPath) return undefined; + if (uri.fsPath.startsWith(workspaceFolderPath)) { + return { + uri: Uri.file(workspaceFolderPath), + name: path.basename(workspaceFolderPath), + index: 0, + }; + } + return undefined; + }, + + asRelativePath( + pathOrUri: string | Uri, + _includeWorkspaceFolder?: boolean, + ): string { + const p = typeof pathOrUri === "string" ? pathOrUri : pathOrUri.fsPath; + if (!workspaceFolderPath) return p; + const rel = path.relative(workspaceFolderPath, p); + if (rel.startsWith("..")) return p; + return rel; + }, + + async openTextDocument(uriOrPath: Uri | string): Promise { + const uri = typeof uriOrPath === "string" ? 
Uri.file(uriOrPath) : uriOrPath; + const filePath = uri.scheme === "file" ? uri.fsPath : uri.path; + const content = (await resolveTextDocumentContent(uri)) ?? ""; + const lines = content.split("\n"); + const ext = path.extname(filePath).slice(1); + + return { + uri, + fileName: filePath, + languageId: ext || "plaintext", + version: 1, + lineCount: lines.length, + isDirty: false, + isUntitled: false, + getText(_range?: unknown) { + return content; + }, + async save() { + return true; + }, + }; + }, + + async findFiles( + include: string, + exclude?: string | null, + maxResults?: number, + _token?: unknown, + ): Promise { + if (!workspaceFolderPath) return []; + const rootPath = workspaceFolderPath; + try { + const results: string[] = []; + const includePatterns = compileGlobPatterns(include); + const includeMatchers = includePatterns.map((pattern) => + globToRegExp(pattern), + ); + const excludeMatchers = compileGlobMatchers( + exclude === undefined + ? `{${DEFAULT_FIND_EXCLUDE_GLOBS.join(",")}}` + : exclude, + ); + + function walkDir(dir: string, depth: number): void { + if (depth > 15 || (maxResults && results.length >= maxResults)) return; + let entries: fs.Dirent[]; + try { + entries = fs.readdirSync(dir, { withFileTypes: true }); + } catch { + return; + } + for (const entry of entries) { + const fullPath = path.join(dir, entry.name); + const relativePath = normalizeGlobPath( + path.relative(rootPath, fullPath), + ); + if ( + relativePath && + (matchesAnyGlob(excludeMatchers, relativePath) || + (entry.isDirectory() && + matchesAnyGlob(excludeMatchers, `${relativePath}/`))) + ) { + continue; + } + if (entry.isDirectory()) { + walkDir(fullPath, depth + 1); + } else if (entry.isFile()) { + if ( + includeMatchers.length === 0 || + matchesAnyGlob(includeMatchers, relativePath) + ) { + results.push(fullPath); + } + } + if (maxResults && results.length >= maxResults) return; + } + } + + walkDir(workspaceFolderPath, 0); + return results.map((r) => Uri.file(r)); + } 
catch { + shimWarn("[vscode-shim] workspace.findFiles failed, returning empty"); + return []; + } + }, + + async applyEdit(edit: WorkspaceEdit): Promise { + try { + for (const [uri, textEdits] of edit.entries()) { + if (uri.scheme !== "file" || !uri.fsPath || textEdits.length === 0) + continue; + const content = fs.readFileSync(uri.fsPath, "utf-8"); + const lines = content.split("\n"); + // 後ろから適用することでインデックスのずれを防ぐ + const sorted = [...textEdits].sort((a, b) => { + const dl = b.range.start.line - a.range.start.line; + return dl !== 0 + ? dl + : b.range.start.character - a.range.start.character; + }); + for (const te of sorted) { + const { start, end } = te.range; + if (start.line === end.line) { + const line = lines[start.line] ?? ""; + const merged = + line.slice(0, start.character) + + te.newText + + line.slice(end.character); + lines.splice(start.line, 1, ...merged.split("\n")); + } else { + const startLine = lines[start.line] ?? ""; + const endLine = lines[end.line] ?? ""; + const merged = + startLine.slice(0, start.character) + + te.newText + + endLine.slice(end.character); + lines.splice( + start.line, + end.line - start.line + 1, + ...merged.split("\n"), + ); + } + } + fs.writeFileSync(uri.fsPath, lines.join("\n"), "utf-8"); + shimLog(`[vscode-shim] workspace.applyEdit: wrote ${uri.fsPath}`); + } + return true; + } catch (err) { + shimWarn("[vscode-shim] workspace.applyEdit failed:", err); + return false; + } + }, + + createFileSystemWatcher( + _globPattern: string, + _ignoreCreateEvents?: boolean, + _ignoreChangeEvents?: boolean, + _ignoreDeleteEvents?: boolean, + ): FileSystemWatcher { + const _onCreate = new EventEmitter(); + const _onChange = new EventEmitter(); + const _onDelete = new EventEmitter(); + const rootPath = workspaceFolderPath; + const includePatterns = compileGlobPatterns(_globPattern); + const includeMatchers = includePatterns.map((pattern) => + globToRegExp(pattern), + ); + const excludeMatchers = compileGlobMatchers( + 
`{${DEFAULT_FIND_EXCLUDE_GLOBS.join(",")}}`, + ); + const dirWatchers = new Map(); + + const matchesWatcherPath = (fullPath: string): boolean => { + if (!rootPath) { + return false; + } + const relativePath = normalizeGlobPath(path.relative(rootPath, fullPath)); + if (!relativePath || relativePath.startsWith("..")) { + return false; + } + return ( + includeMatchers.length === 0 || + matchesAnyGlob(includeMatchers, relativePath) || + matchesAnyGlob(includeMatchers, `${relativePath}/`) + ); + }; + + const shouldSkipDirectory = (directoryPath: string): boolean => { + if (!rootPath) { + return false; + } + const relativePath = normalizeGlobPath( + path.relative(rootPath, directoryPath), + ); + if (!relativePath || relativePath.startsWith("..")) { + return false; + } + return ( + matchesAnyGlob(excludeMatchers, relativePath) || + matchesAnyGlob(excludeMatchers, `${relativePath}/`) || + !directoryMayContainMatches(relativePath, includePatterns) + ); + }; + + const closeDescendantWatchers = (targetPath: string) => { + const watchedDirs = [...dirWatchers.keys()]; + for (const watchedDir of watchedDirs) { + if ( + watchedDir === targetPath || + watchedDir.startsWith(`${targetPath}${path.sep}`) + ) { + const watcher = dirWatchers.get(watchedDir); + if (!watcher) { + continue; + } + watcher.close(); + dirWatchers.delete(watchedDir); + } + } + }; + + const addDirectoryWatcher = (directoryPath: string) => { + if (dirWatchers.has(directoryPath)) { + return; + } + if (shouldSkipDirectory(directoryPath)) { + return; + } + + try { + const watcher = fs.watch(directoryPath, (eventType, filename) => { + if (!filename) { + return; + } + + const fullPath = path.join(directoryPath, filename.toString()); + const exists = fs.existsSync(fullPath); + + if (exists) { + try { + if ( + fs.statSync(fullPath).isDirectory() && + !shouldSkipDirectory(fullPath) + ) { + addDirectoryWatcher(fullPath); + } + } catch {} + } else { + closeDescendantWatchers(fullPath); + } + + if 
(!matchesWatcherPath(fullPath)) { + return; + } + + const uri = Uri.file(fullPath); + if (!exists) { + if (!_ignoreDeleteEvents) { + _onDelete.fire(uri); + } + return; + } + + if (eventType === "change") { + if (!_ignoreChangeEvents) { + _onChange.fire(uri); + } + return; + } + + if (!_ignoreCreateEvents) { + _onCreate.fire(uri); + } + }); + dirWatchers.set(directoryPath, watcher); + } catch (error) { + shimWarn( + `[vscode-shim] createFileSystemWatcher failed for ${directoryPath}:`, + error, + ); + return; + } + + let entries: fs.Dirent[]; + try { + entries = fs.readdirSync(directoryPath, { withFileTypes: true }); + } catch { + return; + } + + for (const entry of entries) { + if (!entry.isDirectory()) { + continue; + } + addDirectoryWatcher(path.join(directoryPath, entry.name)); + } + }; + + if (rootPath) { + addDirectoryWatcher(rootPath); + } + + return { + onDidCreate: _onCreate.event, + onDidChange: _onChange.event, + onDidDelete: _onDelete.event, + dispose() { + for (const watcher of dirWatchers.values()) { + watcher.close(); + } + dirWatchers.clear(); + _onCreate.dispose(); + _onChange.dispose(); + _onDelete.dispose(); + }, + }; + }, + + registerFileSystemProvider( + scheme: string, + provider: unknown, + _options?: { isCaseSensitive?: boolean; isReadonly?: boolean }, + ): Disposable { + fileSystemProviders.set(scheme, provider); + shimLog( + `[vscode-shim] Registered FileSystemProvider for scheme: ${scheme}`, + ); + return new Disposable(() => { + fileSystemProviders.delete(scheme); + }); + }, + + /** Get a registered file system provider (used by workspace.fs for custom schemes) */ + _getFileSystemProvider(scheme: string): unknown { + return fileSystemProviders.get(scheme); + }, + + registerTextDocumentContentProvider( + scheme: string, + provider: unknown, + ): Disposable { + textDocumentContentProviders.set(scheme, provider); + return new Disposable(() => { + textDocumentContentProviders.delete(scheme); + }); + }, + + fs: { + async readFile(uri: Uri): 
Promise { + // Check custom FS providers for non-file schemes + if (uri.scheme !== "file") { + const provider = fileSystemProviders.get(uri.scheme) as + | { readFile?(uri: Uri): Promise } + | undefined; + if (provider?.readFile) { + return provider.readFile(uri); + } + throw new Error(`No file system provider for scheme: ${uri.scheme}`); + } + return fs.promises.readFile(uri.fsPath); + }, + async writeFile(uri: Uri, content: Uint8Array): Promise { + await fs.promises.writeFile(uri.fsPath, content); + }, + async stat(uri: Uri): Promise<{ + type: number; + ctime: number; + mtime: number; + size: number; + }> { + if (uri.scheme !== "file") { + const provider = fileSystemProviders.get(uri.scheme) as + | { + stat?(uri: Uri): Promise<{ + type: number; + ctime: number; + mtime: number; + size: number; + }>; + } + | undefined; + if (provider?.stat) { + return provider.stat(uri); + } + return { type: 1, ctime: 0, mtime: 0, size: 0 }; + } + const s = await fs.promises.stat(uri.fsPath); + return { + type: s.isDirectory() ? 
2 : 1, + ctime: s.ctimeMs, + mtime: s.mtimeMs, + size: s.size, + }; + }, + async delete( + uri: Uri, + _options?: { recursive?: boolean; useTrash?: boolean }, + ): Promise { + await fs.promises.rm(uri.fsPath, { recursive: _options?.recursive }); + }, + async rename( + source: Uri, + target: Uri, + _options?: { overwrite?: boolean }, + ): Promise { + await fs.promises.rename(source.fsPath, target.fsPath); + }, + async createDirectory(uri: Uri): Promise { + await fs.promises.mkdir(uri.fsPath, { recursive: true }); + }, + async copy( + source: Uri, + target: Uri, + _options?: { overwrite?: boolean }, + ): Promise { + await fs.promises.copyFile(source.fsPath, target.fsPath); + }, + async readDirectory(uri: Uri): Promise<[string, number][]> { + if (uri.scheme !== "file") { + const provider = fileSystemProviders.get(uri.scheme) as + | { readDirectory?(uri: Uri): Promise<[string, number][]> } + | undefined; + if (provider?.readDirectory) { + return provider.readDirectory(uri); + } + throw new Error(`No file system provider for scheme: ${uri.scheme}`); + } + const entries = await fs.promises.readdir(uri.fsPath, { + withFileTypes: true, + }); + return Promise.all( + entries.map(async (entry) => { + if (entry.isDirectory()) { + return [entry.name, FILE_TYPE.Directory] as [string, number]; + } + + if (!entry.isSymbolicLink()) { + return [entry.name, FILE_TYPE.File] as [string, number]; + } + + const entryPath = path.join(uri.fsPath, entry.name); + + try { + const stats = await fs.promises.stat(entryPath); + return [ + entry.name, + (stats.isDirectory() ? FILE_TYPE.Directory : FILE_TYPE.File) | + FILE_TYPE.SymbolicLink, + ] as [string, number]; + } catch { + return [entry.name, FILE_TYPE.SymbolicLink] as [string, number]; + } + }), + ); + }, + isWritableFileSystem(scheme: string): boolean | undefined { + return scheme === "file" ? 
true : undefined; + }, + }, +}; diff --git a/apps/desktop/src/main/lib/vscode-shim/extension-host-manager.ts b/apps/desktop/src/main/lib/vscode-shim/extension-host-manager.ts new file mode 100644 index 00000000000..d9e3b49250f --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/extension-host-manager.ts @@ -0,0 +1,553 @@ +/** + * Extension Host Manager — manages per-workspace extension host processes. + * + * Each active workspace gets its own child process running extension-host-worker.js, + * providing full isolation of extension state, workspace paths, and webview providers. + * + * Follows the same pattern as host-service-manager.ts for process lifecycle. + */ + +import childProcess from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { EventEmitter } from "node:events"; +import os from "node:os"; +import path from "node:path"; +import { clearWebviewHtml } from "./api/webview-server"; +import type { MainToWorkerMessage, WorkerToMainMessage } from "./ipc-types"; + +const BASE_RESTART_DELAY = 1000; +const MAX_RESTART_DELAY = 30000; +const MAX_RESTART_ATTEMPTS = 5; +const READY_TIMEOUT = 15000; + +interface ExtensionHostProcess { + workspaceId: string; + workspacePath: string; + process: childProcess.ChildProcess | null; + status: "starting" | "running" | "degraded" | "stopped"; + restartCount: number; + lastCrash?: number; +} + +interface PendingResolve { + resolve: (result: { viewId: string | null; html: string | null }) => void; + timer: ReturnType; +} + +export class ExtensionHostManager extends EventEmitter { + private instances = new Map(); + private startPromises = new Map>(); + private pendingResolves = new Map(); + private scheduledRestarts = new Map>(); + private viewIdToWorkspace = new Map(); + private workerScriptPath: string; + private extensionsDir: string; + private enabledConfigPath: string; + + constructor() { + super(); + this.workerScriptPath = path.join(__dirname, "extension-host-worker.js"); + this.extensionsDir 
= path.join(os.homedir(), ".vscode", "extensions"); + + // Resolve enabled config path + try { + const { app } = require("electron"); + this.enabledConfigPath = path.join( + app.getPath("userData"), + "vscode-extensions-enabled.json", + ); + } catch { + this.enabledConfigPath = path.join( + os.homedir(), + ".superset-desktop", + "vscode-extensions-enabled.json", + ); + } + } + + async start(workspaceId: string, workspacePath: string): Promise { + const existing = this.instances.get(workspaceId); + if ( + existing && + (existing.status === "running" || existing.status === "starting") + ) { + existing.workspacePath = workspacePath; + this.sendToWorker(workspaceId, { + type: "set-workspace-path", + workspacePath, + }); + const inFlightStart = this.startPromises.get(workspaceId); + if (inFlightStart) { + return inFlightStart; + } + if (existing.status === "running" && existing.process) { + return; + } + } + + const inFlightStart = this.startPromises.get(workspaceId); + if (inFlightStart) { + return inFlightStart; + } + + const startPromise = this.spawn(workspaceId, workspacePath).finally(() => { + if (this.startPromises.get(workspaceId) === startPromise) { + this.startPromises.delete(workspaceId); + } + }); + this.startPromises.set(workspaceId, startPromise); + await startPromise; + } + + private async spawn( + workspaceId: string, + workspacePath: string, + ): Promise { + // Inherit restartCount from previous instance so MAX_RESTART_ATTEMPTS is respected + const prevRestartCount = this.instances.get(workspaceId)?.restartCount ?? 
0; + const instance: ExtensionHostProcess = { + workspaceId, + workspacePath, + process: null, + status: "starting", + restartCount: prevRestartCount, + }; + this.instances.set(workspaceId, instance); + + const env: Record = { + ...(process.env as Record), + ELECTRON_RUN_AS_NODE: "1", + EXTENSION_HOST_WORKSPACE_ID: workspaceId, + EXTENSION_HOST_WORKSPACE_PATH: workspacePath, + EXTENSION_HOST_EXTENSIONS_DIR: this.extensionsDir, + EXTENSION_HOST_ENABLED_CONFIG: this.enabledConfigPath, + NODE_ENV: process.env.NODE_ENV ?? "production", + }; + + const child = childProcess.spawn( + process.execPath, + [this.workerScriptPath], + { + stdio: ["ignore", "pipe", "pipe", "ipc"], + env, + }, + ); + + instance.process = child; + + // Pipe stdout/stderr with workspace prefix + const onStdout = (data: Buffer) => { + for (const line of data.toString().split("\n").filter(Boolean)) { + console.log(line); + } + }; + const onStderr = (data: Buffer) => { + for (const line of data.toString().split("\n").filter(Boolean)) { + console.error(line); + } + }; + child.stdout?.on("data", onStdout); + child.stderr?.on("data", onStderr); + + // Handle IPC messages from worker + child.on("message", (msg: WorkerToMainMessage) => { + this.handleWorkerMessage(workspaceId, msg); + }); + + // Shared cleanup called from both exit and error handlers. + // Guards against double-execution via instance.process identity check. 
+ const cleanupWorker = (intentional: boolean) => { + if (instance.process !== child) return; + child.stdout?.off("data", onStdout); + child.stderr?.off("data", onStderr); + this.clearTrackedWebviewsForWorkspace(workspaceId); + instance.status = "degraded"; + instance.process = null; + instance.lastCrash = Date.now(); + if (!intentional) { + this.scheduleRestart(workspaceId); + } + }; + + // Handle exit + child.on("exit", (code) => { + console.log( + `[ext-host-manager] Worker ${workspaceId} exited with code ${code}`, + ); + cleanupWorker(instance.status === "stopped"); + }); + + // Wait for ready message + await new Promise((resolve, reject) => { + let settled = false; + + const timer = setTimeout(() => { + if (settled) return; + settled = true; + child.off("message", onMessage); + reject( + new Error( + `Extension host worker ${workspaceId} failed to become ready within ${READY_TIMEOUT}ms`, + ), + ); + }, READY_TIMEOUT); + + const onMessage = (msg: WorkerToMainMessage) => { + if (msg.type === "ready") { + if (settled) return; + settled = true; + clearTimeout(timer); + child.off("message", onMessage); + instance.status = "running"; + instance.restartCount = 0; + resolve(); + } + }; + child.on("message", onMessage); + + child.on("error", (err) => { + if (settled) return; + settled = true; + clearTimeout(timer); + child.off("message", onMessage); + // error may not be followed by exit; run cleanup + restart here as well + cleanupWorker(false); + reject(err); + }); + }); + } + + private handleWorkerMessage( + workspaceId: string, + msg: WorkerToMainMessage, + ): void { + switch (msg.type) { + case "ready": + // Handled in spawn() + break; + + case "webview-event": + // Track viewId → workspaceId mapping + if (msg.event.type === "html" || msg.event.type === "panel-created") { + this.viewIdToWorkspace.set(msg.event.viewId, workspaceId); + } + if (msg.event.type === "dispose") { + this.viewIdToWorkspace.delete(msg.event.viewId); + } + this.emit("webview-event", 
workspaceId, msg.event); + break; + + case "resolve-webview-result": { + const pending = this.pendingResolves.get(msg.requestId); + if (pending) { + clearTimeout(pending.timer); + this.pendingResolves.delete(msg.requestId); + if (msg.viewId) { + this.viewIdToWorkspace.set(msg.viewId, workspaceId); + } + pending.resolve({ viewId: msg.viewId, html: msg.html }); + } + break; + } + + case "open-file": + this.emit("open-file", workspaceId, msg); + break; + + case "open-diff": + this.emit("open-diff", workspaceId, msg); + break; + + case "show-dialog": + // Proxy dialog calls to Electron main process and return result + this.handleDialogRequest(workspaceId, msg); + break; + + case "show-quickpick": + this.handleQuickPickRequest(workspaceId, msg); + break; + + case "show-open-dialog": + this.handleOpenDialogRequest(workspaceId, msg); + break; + } + } + + private async handleDialogRequest( + workspaceId: string, + msg: Extract, + ): Promise { + try { + const { dialog } = require("electron"); + const result = await dialog.showMessageBox({ + type: + msg.method === "showErrorMessage" + ? "error" + : msg.method === "showWarningMessage" + ? "warning" + : "info", + message: msg.message, + buttons: msg.items, + }); + this.sendToWorker(workspaceId, { + type: "dialog-result", + requestId: msg.requestId, + selectedIndex: result.response, + }); + } catch { + this.sendToWorker(workspaceId, { + type: "dialog-result", + requestId: msg.requestId, + selectedIndex: -1, + }); + } + } + + private async handleQuickPickRequest( + workspaceId: string, + msg: Extract, + ): Promise { + try { + const { dialog } = require("electron"); + const result = await dialog.showMessageBox({ + type: "question", + title: msg.placeHolder ?? "Select", + message: msg.placeHolder ?? "Select an option", + buttons: [...msg.labels, "Cancel"], + cancelId: msg.labels.length, + }); + const selectedIndex = + result.response === msg.labels.length ? 
-1 : result.response; + this.sendToWorker(workspaceId, { + type: "dialog-result", + requestId: msg.requestId, + selectedIndex, + }); + } catch { + this.sendToWorker(workspaceId, { + type: "dialog-result", + requestId: msg.requestId, + selectedIndex: -1, + }); + } + } + + private async handleOpenDialogRequest( + workspaceId: string, + msg: Extract, + ): Promise { + try { + const { dialog } = require("electron"); + const properties: Array< + "openFile" | "openDirectory" | "multiSelections" + > = []; + if (msg.canSelectFolders) properties.push("openDirectory"); + if (msg.canSelectFiles !== false) properties.push("openFile"); + if (msg.canSelectMany) properties.push("multiSelections"); + const result = await dialog.showOpenDialog({ + properties, + title: msg.title, + filters: msg.filters, + defaultPath: msg.defaultPath, + }); + this.sendToWorker(workspaceId, { + type: "open-dialog-result", + requestId: msg.requestId, + filePaths: + result.canceled || result.filePaths.length === 0 + ? null + : result.filePaths, + }); + } catch { + this.sendToWorker(workspaceId, { + type: "open-dialog-result", + requestId: msg.requestId, + filePaths: null, + }); + } + } + + async resolveWebview( + workspaceId: string, + viewType: string, + extensionPath: string, + ): Promise<{ viewId: string | null; html: string | null }> { + const instance = this.instances.get(workspaceId); + if (!instance?.process || instance.status !== "running") { + return { viewId: null, html: null }; + } + + const requestId = randomUUID(); + + return new Promise((resolve) => { + const timer = setTimeout(() => { + this.pendingResolves.delete(requestId); + resolve({ viewId: null, html: null }); + }, 10000); + + this.pendingResolves.set(requestId, { resolve, timer }); + + this.sendToWorker(workspaceId, { + type: "resolve-webview", + requestId, + viewType, + extensionPath, + }); + }); + } + + postMessageToExtension( + workspaceId: string, + viewId: string, + message: unknown, + ): void { + // Resolve workspace from 
viewId if not provided + const resolvedWs = workspaceId || this.viewIdToWorkspace.get(viewId); + if (!resolvedWs) return; + + this.sendToWorker(resolvedWs, { + type: "post-message", + viewId, + message, + }); + } + + setActiveEditor( + workspaceId: string, + filePath: string | null, + languageId?: string, + ): void { + this.sendToWorker(workspaceId, { + type: "set-active-editor", + filePath, + languageId, + }); + } + + setWorkspacePath(workspaceId: string, workspacePath: string): void { + const instance = this.instances.get(workspaceId); + if (instance) { + instance.workspacePath = workspacePath; + } + this.sendToWorker(workspaceId, { + type: "set-workspace-path", + workspacePath, + }); + } + + stop(workspaceId: string): void { + const instance = this.instances.get(workspaceId); + if (!instance) return; + + instance.status = "stopped"; + this.startPromises.delete(workspaceId); + + // Cancel scheduled restart + const restartTimer = this.scheduledRestarts.get(workspaceId); + if (restartTimer) { + clearTimeout(restartTimer); + this.scheduledRestarts.delete(workspaceId); + } + + // Send shutdown message + if (instance.process) { + this.sendToWorker(workspaceId, { type: "shutdown" }); + // Force kill after 5s + const killTimer = setTimeout(() => { + instance.process?.kill("SIGKILL"); + }, 5000); + instance.process.on("exit", () => clearTimeout(killTimer)); + } + + this.instances.delete(workspaceId); + } + + stopAll(): void { + for (const id of [...this.instances.keys()]) { + this.stop(id); + } + } + + isRunning(workspaceId: string): boolean { + const instance = this.instances.get(workspaceId); + return instance?.status === "running"; + } + + getWorkspacePath(workspaceId: string): string | undefined { + return this.instances.get(workspaceId)?.workspacePath; + } + + getWorkspaceForViewId(viewId: string): string | undefined { + return this.viewIdToWorkspace.get(viewId); + } + + getRunningWorkspaceIds(): string[] { + return [...this.instances.entries()] + .filter(([, 
instance]) => instance.status === "running") + .map(([workspaceId]) => workspaceId); + } + + private clearTrackedWebviewsForWorkspace(workspaceId: string): void { + for (const [viewId, wsId] of this.viewIdToWorkspace) { + if (wsId !== workspaceId) continue; + this.viewIdToWorkspace.delete(viewId); + clearWebviewHtml(viewId); + } + } + + private sendToWorker(workspaceId: string, msg: MainToWorkerMessage): void { + const instance = this.instances.get(workspaceId); + if (instance?.process?.connected) { + instance.process.send(msg); + } + } + + private scheduleRestart(workspaceId: string): void { + const instance = this.instances.get(workspaceId); + if (!instance || instance.status === "stopped") return; + + if (instance.restartCount >= MAX_RESTART_ATTEMPTS) { + console.error( + `[ext-host-manager] Max restart attempts (${MAX_RESTART_ATTEMPTS}) reached for ${workspaceId}, giving up`, + ); + instance.status = "stopped"; + return; + } + + const delay = Math.min( + BASE_RESTART_DELAY * 2 ** instance.restartCount, + MAX_RESTART_DELAY, + ); + instance.restartCount++; + + console.log( + `[ext-host-manager] Scheduling restart for ${workspaceId} in ${delay}ms (attempt ${instance.restartCount})`, + ); + + const timer = setTimeout(() => { + this.scheduledRestarts.delete(workspaceId); + // Use start() instead of spawn() directly so startPromises dedup is respected + const current = this.instances.get(workspaceId); + if (current?.status === "degraded") { + this.start(workspaceId, current.workspacePath).catch((err) => { + console.error( + `[ext-host-manager] Restart failed for ${workspaceId}:`, + err, + ); + }); + } + }, delay); + + this.scheduledRestarts.set(workspaceId, timer); + } +} + +// Singleton +let manager: ExtensionHostManager | null = null; + +export function getExtensionHostManager(): ExtensionHostManager { + if (!manager) { + manager = new ExtensionHostManager(); + } + return manager; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/extension-host.ts 
b/apps/desktop/src/main/lib/vscode-shim/extension-host.ts new file mode 100644 index 00000000000..5fd90c21879 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/extension-host.ts @@ -0,0 +1,37 @@ +/** + * Extension Host: high-level API to manage VS Code extensions in Superset Desktop. + * + * In the per-workspace model, extension loading is done by individual worker processes + * managed by ExtensionHostManager. This module handles process-level setup only. + */ + +import { registerWebviewProtocol } from "./api/protocol-handler"; +import { startWebviewServer, stopWebviewServer } from "./api/webview-server"; +import { getExtensionHostManager } from "./extension-host-manager"; + +let isInitialized = false; + +export async function initExtensionHost(): Promise { + if (isInitialized) { + return; + } + + // Register protocol handler for webview resources + registerWebviewProtocol(); + + // Start HTTP server for webview content + await startWebviewServer(); + + // Initialize manager singleton + getExtensionHostManager(); + + isInitialized = true; +} + +export async function shutdownExtensionHost(): Promise { + getExtensionHostManager().stopAll(); + stopWebviewServer(); + isInitialized = false; +} + +export { isInitialized as isExtensionHostInitialized }; diff --git a/apps/desktop/src/main/lib/vscode-shim/index.ts b/apps/desktop/src/main/lib/vscode-shim/index.ts new file mode 100644 index 00000000000..3d1a3d13d1b --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/index.ts @@ -0,0 +1,33 @@ +/** + * VS Code Extension Host Shim for Superset Desktop. + * + * Provides a minimal VS Code API surface to run official VS Code extensions + * (Claude Code, ChatGPT/Codex) inside the Electron app. 
+ */ + +export { + getActivePanel, + getActiveView, + getViewProvider, + onWebviewEvent, + resolveWebviewView, +} from "./api/webview"; +export { clearWebviewHtml, setWebviewHtml } from "./api/webview-server"; +export { handleUri, setActiveTextEditor } from "./api/window"; +export { + initExtensionHost, + shutdownExtensionHost, +} from "./extension-host"; +export { + deactivateExtension, + discoverExtensions, + getLoadedExtension, + getLoadedExtensions, + loadExtension, +} from "./loader"; +export type { + ExtensionInfo, + ExtensionManifest, + WebviewMessage, +} from "./types"; +export { webviewBridge } from "./webview-bridge"; diff --git a/apps/desktop/src/main/lib/vscode-shim/ipc-types.ts b/apps/desktop/src/main/lib/vscode-shim/ipc-types.ts new file mode 100644 index 00000000000..1b59033aa41 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/ipc-types.ts @@ -0,0 +1,75 @@ +/** + * Typed IPC message definitions for communication between + * the main process and per-workspace extension host worker processes. 
+ */ + +/** Messages sent FROM main process TO worker */ +export type MainToWorkerMessage = + | { type: "set-active-editor"; filePath: string | null; languageId?: string } + | { type: "set-workspace-path"; workspacePath: string } + | { + type: "resolve-webview"; + requestId: string; + viewType: string; + extensionPath: string; + } + | { type: "post-message"; viewId: string; message: unknown } + | { type: "shutdown" } + | { type: "dialog-result"; requestId: string; selectedIndex: number } + | { + type: "open-dialog-result"; + requestId: string; + filePaths: string[] | null; + }; + +/** Messages sent FROM worker TO main process */ +export type WorkerToMainMessage = + | { type: "ready" } + | { + type: "webview-event"; + event: { + viewId: string; + type: "html" | "message" | "title" | "dispose" | "panel-created"; + data: unknown; + }; + } + | { + type: "resolve-webview-result"; + requestId: string; + viewId: string | null; + html: string | null; + } + | { type: "open-file"; filePath: string; line?: number } + | { + type: "open-diff"; + leftUri: string; + rightUri: string; + title?: string; + leftContent?: string; + } + | { + type: "show-dialog"; + requestId: string; + method: + | "showInformationMessage" + | "showWarningMessage" + | "showErrorMessage"; + message: string; + items: string[]; + } + | { + type: "show-quickpick"; + requestId: string; + labels: string[]; + placeHolder?: string; + } + | { + type: "show-open-dialog"; + requestId: string; + canSelectFiles?: boolean; + canSelectFolders?: boolean; + canSelectMany?: boolean; + title?: string; + filters?: Array<{ name: string; extensions: string[] }>; + defaultPath?: string; + }; diff --git a/apps/desktop/src/main/lib/vscode-shim/loader.ts b/apps/desktop/src/main/lib/vscode-shim/loader.ts new file mode 100644 index 00000000000..bf4fc13a9da --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/loader.ts @@ -0,0 +1,209 @@ +/** + * Extension loader: discovers, loads, and activates VS Code extensions. 
+ * + * Intercepts `require('vscode')` via Module._resolveFilename so that + * extensions receive our shim instead of the real VS Code API. + */ + +import fs from "node:fs"; +import Module from "node:module"; +import path from "node:path"; +import { registerExtensionDefaults } from "./api/configuration"; +import { shimLog, shimWarn } from "./api/debug-log"; +import { + createExtensionContext, + type VscodeExtensionContext, +} from "./api/extension-context"; +import type { ExtensionInfo, ExtensionManifest } from "./types"; +import { createVscodeApi } from "./vscode-api"; + +const vscodeApi = createVscodeApi(); +let interceptInstalled = false; + +type ResolveFilename = ( + this: unknown, + request: string, + parent: unknown, + isMain: boolean, + options: unknown, +) => string; + +function installRequireIntercept(): void { + if (interceptInstalled) return; + interceptInstalled = true; + + // Inject vscode shim into require cache so require('vscode') returns our API. + // We use _resolveFilename to redirect 'vscode' to a known cache key, + // and pre-populate the cache with our shim module. 
+ const VSCODE_CACHE_KEY = path.join(__dirname, "__vscode_shim_module__"); + + // Pre-populate the require cache + require.cache[VSCODE_CACHE_KEY] = { + id: VSCODE_CACHE_KEY, + filename: VSCODE_CACHE_KEY, + loaded: true, + exports: vscodeApi, + children: [], + paths: [], + path: __dirname, + parent: null, + require, + isPreloading: false, + } as unknown as NodeModule; + + const moduleWithResolver = Module as unknown as { + _resolveFilename: ResolveFilename; + }; + const originalResolveFilename = moduleWithResolver._resolveFilename; + moduleWithResolver._resolveFilename = function ( + request: string, + parent: unknown, + isMain: boolean, + options: unknown, + ) { + if (request === "vscode") { + return VSCODE_CACHE_KEY; + } + return originalResolveFilename.call(this, request, parent, isMain, options); + }; +} + +export function discoverExtensions(extensionsDir: string): ExtensionInfo[] { + if (!fs.existsSync(extensionsDir)) return []; + + const results: ExtensionInfo[] = []; + const entries = fs.readdirSync(extensionsDir, { withFileTypes: true }); + + for (const entry of entries) { + if (!entry.isDirectory()) continue; + const extPath = path.join(extensionsDir, entry.name); + const manifestPath = path.join(extPath, "package.json"); + + if (!fs.existsSync(manifestPath)) continue; + + try { + const manifest: ExtensionManifest = JSON.parse( + fs.readFileSync(manifestPath, "utf-8"), + ); + if (!manifest.main) continue; + + const id = `${manifest.publisher}.${manifest.name}`.toLowerCase(); + results.push({ + id, + extensionPath: extPath, + manifest, + isActive: false, + }); + } catch (err) { + shimWarn(`[vscode-shim] Failed to parse ${manifestPath}:`, err); + } + } + + return results; +} + +interface LoadedExtension { + info: ExtensionInfo; + context: VscodeExtensionContext; + exports: Record; +} + +const loadedExtensions = new Map(); + +export async function loadExtension( + info: ExtensionInfo, +): Promise { + const existing = loadedExtensions.get(info.id); + if 
(existing) { + return existing; + } + + installRequireIntercept(); + + // Register default configuration values + registerExtensionDefaults(info.manifest); + + // Create extension context + const context = createExtensionContext( + info.id, + info.extensionPath, + info.manifest, + ); + + // Load the extension's main module + const manifestMain = info.manifest.main; + if (!manifestMain) { + throw new Error(`[vscode-shim] Extension ${info.id} has no main entry`); + } + const mainPath = path.resolve(info.extensionPath, manifestMain); + shimLog(`[vscode-shim] Loading extension: ${info.id} from ${mainPath}`); + + let extensionModule: Record; + try { + extensionModule = require(mainPath); + } catch (err) { + console.error(`[vscode-shim] Failed to require ${info.id}:`, err); + throw err; + } + + // Activate the extension + if (typeof extensionModule.activate === "function") { + shimLog(`[vscode-shim] Activating extension: ${info.id}`); + try { + await extensionModule.activate(context); + info.isActive = true; + shimLog(`[vscode-shim] Extension activated: ${info.id}`); + } catch (err) { + console.error(`[vscode-shim] Failed to activate ${info.id}:`, err); + throw err; + } + } + + const loaded: LoadedExtension = { + info, + context, + exports: extensionModule, + }; + + loadedExtensions.set(info.id, loaded); + return loaded; +} + +export async function deactivateExtension(extensionId: string): Promise { + const loaded = loadedExtensions.get(extensionId); + if (!loaded) return; + + if (typeof loaded.exports.deactivate === "function") { + try { + await loaded.exports.deactivate(); + } catch (err) { + console.error(`[vscode-shim] Failed to deactivate ${extensionId}:`, err); + } + } + + // Dispose all subscriptions + for (const sub of loaded.context.subscriptions) { + try { + sub.dispose(); + } catch {} + } + + loaded.info.isActive = false; + loadedExtensions.delete(extensionId); +} + +export async function deactivateAll(): Promise { + for (const id of 
[...loadedExtensions.keys()]) { + await deactivateExtension(id); + } +} + +export function getLoadedExtension( + extensionId: string, +): LoadedExtension | undefined { + return loadedExtensions.get(extensionId); +} + +export function getLoadedExtensions(): LoadedExtension[] { + return [...loadedExtensions.values()]; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/types.ts b/apps/desktop/src/main/lib/vscode-shim/types.ts new file mode 100644 index 00000000000..61a731869e6 --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/types.ts @@ -0,0 +1,87 @@ +/** + * Shared types for the VS Code extension host shim. + */ + +export interface ExtensionManifest { + name: string; + publisher: string; + version: string; + main?: string; + activationEvents?: string[]; + contributes?: { + commands?: Array<{ + command: string; + title: string; + category?: string; + icon?: string | { light: string; dark: string }; + enablement?: string; + }>; + views?: Record< + string, + Array<{ + id: string; + name: string; + type?: string; + when?: string; + }> + >; + viewsContainers?: { + activitybar?: Array<{ + id: string; + title: string; + icon: string; + }>; + panel?: Array<{ + id: string; + title: string; + icon: string; + }>; + }; + configuration?: ConfigurationSchema | ConfigurationSchema[]; + menus?: Record< + string, + Array<{ command: string; when?: string; group?: string }> + >; + keybindings?: Array<{ + command: string; + key: string; + mac?: string; + when?: string; + }>; + jsonValidation?: Array<{ fileMatch: string; url: string }>; + languages?: Array<{ + id: string; + extensions?: string[]; + filenames?: string[]; + }>; + }; + extensionDependencies?: string[]; + enabledApiProposals?: string[]; +} + +interface ConfigurationSchema { + title?: string; + properties?: Record< + string, + { + type?: string; + default?: unknown; + description?: string; + enum?: unknown[]; + enumDescriptions?: string[]; + } + >; +} + +export interface ExtensionInfo { + id: string; + 
extensionPath: string; + manifest: ExtensionManifest; + isActive: boolean; +} + +export interface WebviewMessage { + viewId: string; + type: "html" | "message" | "title" | "options"; + data: unknown; +} diff --git a/apps/desktop/src/main/lib/vscode-shim/vscode-api.ts b/apps/desktop/src/main/lib/vscode-shim/vscode-api.ts new file mode 100644 index 00000000000..b52e0b58dfa --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/vscode-api.ts @@ -0,0 +1,753 @@ +/** + * Factory that creates a `vscode` module-like namespace object. + * Extensions receive this when they `require('vscode')`. + * + * Unimplemented API accesses are logged via Proxy so we can + * discover which APIs extensions actually use at runtime. + */ + +import { commands } from "./api/commands"; +import { shimWarn } from "./api/debug-log"; +import { + CancellationTokenSource, + Disposable, + EventEmitter, +} from "./api/event-emitter"; +import { Uri } from "./api/uri"; +import { window } from "./api/window"; +import { workspace } from "./api/workspace"; + +// VS Code enums +const StatusBarAlignment = { Left: 1, Right: 2 } as const; +const ViewColumn = { + Active: -1, + Beside: -2, + One: 1, + Two: 2, + Three: 3, + Four: 4, +} as const; +const ProgressLocation = { + SourceControl: 1, + Window: 10, + Notification: 15, +} as const; +const ConfigurationTarget = { + Global: 1, + Workspace: 2, + WorkspaceFolder: 3, +} as const; +const DiagnosticSeverity = { + Error: 0, + Warning: 1, + Information: 2, + Hint: 3, +} as const; +const FileType = { + Unknown: 0, + File: 1, + Directory: 2, + SymbolicLink: 64, +} as const; +const EndOfLine = { LF: 1, CRLF: 2 } as const; +const OverviewRulerLane = { Left: 1, Center: 2, Right: 4, Full: 7 } as const; +const ExtensionMode = { Production: 1, Development: 2, Test: 3 } as const; +const TreeItemCollapsibleState = { + None: 0, + Collapsed: 1, + Expanded: 2, +} as const; +const TextEditorRevealType = { + Default: 0, + InCenter: 1, + InCenterIfOutsideViewport: 2, + AtTop: 
3, +} as const; +const EnvironmentVariableMutatorType = { + Replace: 1, + Append: 2, + Prepend: 3, +} as const; +const UIKind = { Desktop: 1, Web: 2 } as const; +const LogLevel = { + Off: 0, + Trace: 1, + Debug: 2, + Info: 3, + Warning: 4, + Error: 5, +} as const; +const ExtensionKind = { UI: 1, Workspace: 2 } as const; +const ColorThemeKind = { + Light: 1, + Dark: 2, + HighContrast: 3, + HighContrastLight: 4, +} as const; +const SymbolKind = { + File: 0, + Module: 1, + Namespace: 2, + Package: 3, + Class: 4, + Method: 5, + Property: 6, + Field: 7, + Constructor: 8, + Enum: 9, + Interface: 10, + Function: 11, + Variable: 12, + Constant: 13, + String: 14, + Number: 15, + Boolean: 16, + Array: 17, + Object: 18, + Key: 19, + Null: 20, + EnumMember: 21, + Struct: 22, + Event: 23, + Operator: 24, + TypeParameter: 25, +} as const; +const CompletionItemKind = { + Text: 0, + Method: 1, + Function: 2, + Constructor: 3, + Field: 4, + Variable: 5, + Class: 6, + Interface: 7, + Module: 8, + Property: 9, + Unit: 10, + Value: 11, + Enum: 12, + Keyword: 13, + Snippet: 14, + Color: 15, + File: 16, + Reference: 17, + Folder: 18, + EnumMember: 19, + Constant: 20, + Struct: 21, + Event: 22, + Operator: 23, + TypeParameter: 24, +} as const; +const TextDocumentChangeReason = { Undo: 1, Redo: 2 } as const; +const QuickPickItemKind = { + Default: 0, + Separator: -1, +} as const; + +// Stub classes +class Position { + readonly line: number; + readonly character: number; + constructor(line: number, character: number) { + this.line = line; + this.character = character; + } + isEqual(other: Position): boolean { + return this.line === other.line && this.character === other.character; + } + isBefore(other: Position): boolean { + return ( + this.line < other.line || + (this.line === other.line && this.character < other.character) + ); + } + isAfter(other: Position): boolean { + return !this.isEqual(other) && !this.isBefore(other); + } + translate(lineDelta?: number, characterDelta?: number): 
Position { + return new Position( + this.line + (lineDelta ?? 0), + this.character + (characterDelta ?? 0), + ); + } + with(line?: number, character?: number): Position { + return new Position(line ?? this.line, character ?? this.character); + } + compareTo(other: Position): number { + return this.line - other.line || this.character - other.character; + } +} + +class Range { + readonly start: Position; + readonly end: Position; + constructor( + startLine: number | Position, + startChar: number | Position, + endLine?: number, + endChar?: number, + ) { + if (typeof startLine === "number") { + if (typeof endLine !== "number" || typeof endChar !== "number") { + throw new TypeError("Range requires endLine and endChar"); + } + this.start = new Position(startLine, startChar as number); + this.end = new Position(endLine, endChar); + } else { + this.start = startLine; + this.end = startChar as Position; + } + } + get isEmpty(): boolean { + return this.start.isEqual(this.end); + } + contains(_positionOrRange: Position | Range): boolean { + return true; + } + with(start?: Position, end?: Position): Range { + return new Range(start ?? this.start, end ?? 
this.end); + } +} + +class Selection extends Range { + readonly anchor: Position; + readonly active: Position; + constructor( + anchorLine: number | Position, + anchorChar: number | Position, + activeLine?: number, + activeChar?: number, + ) { + if (typeof anchorLine === "number") { + if (typeof activeLine !== "number" || typeof activeChar !== "number") { + throw new TypeError("Selection requires activeLine and activeChar"); + } + super(anchorLine, anchorChar as number, activeLine, activeChar); + this.anchor = new Position(anchorLine, anchorChar as number); + this.active = new Position(activeLine, activeChar); + } else { + super(anchorLine, anchorChar as Position); + this.anchor = anchorLine; + this.active = anchorChar as Position; + } + } + get isReversed(): boolean { + return this.anchor.isAfter(this.active); + } +} + +class ThemeColor { + readonly id: string; + constructor(id: string) { + this.id = id; + } +} + +class ThemeIcon { + static readonly File = new ThemeIcon("file"); + static readonly Folder = new ThemeIcon("folder"); + readonly id: string; + readonly color?: ThemeColor; + constructor(id: string, color?: ThemeColor) { + this.id = id; + this.color = color; + } +} + +class MarkdownString { + value: string; + isTrusted?: boolean; + supportThemeIcons?: boolean; + supportHtml?: boolean; + constructor(value?: string, supportThemeIcons?: boolean) { + this.value = value ?? ""; + this.supportThemeIcons = supportThemeIcons; + } + appendText(value: string): MarkdownString { + this.value += value; + return this; + } + appendMarkdown(value: string): MarkdownString { + this.value += value; + return this; + } + appendCodeblock(code: string, language?: string): MarkdownString { + this.value += `\n\`\`\`${language ?? 
""}\n${code}\n\`\`\`\n`; + return this; + } +} + +class WorkspaceEdit { + private _edits: Array<{ + uri: Uri; + edits: Array<{ range: Range; newText: string }>; + }> = []; + replace(uri: Uri, range: Range, newText: string): void { + this._edits.push({ uri, edits: [{ range, newText }] }); + } + insert(uri: Uri, position: Position, newText: string): void { + this.replace(uri, new Range(position, position), newText); + } + delete(uri: Uri, range: Range): void { + this.replace(uri, range, ""); + } + /** Set all edits for a given URI (replaces existing edits for that URI) */ + set( + uri: Uri, + edits: Array<{ range: Range; newText: string } | unknown>, + ): void { + const textEdits = (edits as Array<{ range?: Range; newText?: string }>) + .filter((e) => e && "range" in e && "newText" in e) + .map((e) => ({ range: e.range as Range, newText: e.newText as string })); + const existing = this._edits.find( + (e) => e.uri.toString() === uri.toString(), + ); + if (existing) { + existing.edits = textEdits; + } else if (textEdits.length > 0) { + this._edits.push({ uri, edits: textEdits }); + } + } + entries(): Array<[Uri, Array<{ range: Range; newText: string }>]> { + return this._edits.map((e) => [e.uri, e.edits]); + } +} + +class CodeLens { + readonly range: Range; + command?: { title: string; command: string; arguments?: unknown[] }; + constructor( + range: Range, + command?: { title: string; command: string; arguments?: unknown[] }, + ) { + this.range = range; + this.command = command; + } + get isResolved(): boolean { + return !!this.command; + } +} + +class TabInputText { + readonly uri: Uri; + constructor(uri: Uri) { + this.uri = uri; + } +} + +class NotebookCellOutputItem { + readonly mime: string; + readonly data: Uint8Array; + constructor(data: Uint8Array, mime: string) { + this.data = data; + this.mime = mime; + } + static text(value: string, mime?: string): NotebookCellOutputItem { + return new NotebookCellOutputItem( + new TextEncoder().encode(value), + mime ?? 
"text/plain", + ); + } + static json(value: unknown, mime?: string): NotebookCellOutputItem { + return new NotebookCellOutputItem( + new TextEncoder().encode(JSON.stringify(value)), + mime ?? "application/json", + ); + } + static stdout(value: string): NotebookCellOutputItem { + return NotebookCellOutputItem.text( + value, + "application/vnd.code.notebook.stdout", + ); + } + static stderr(value: string): NotebookCellOutputItem { + return NotebookCellOutputItem.text( + value, + "application/vnd.code.notebook.stderr", + ); + } + static error(err: Error): NotebookCellOutputItem { + return NotebookCellOutputItem.text( + JSON.stringify({ + name: err.name, + message: err.message, + stack: err.stack, + }), + "application/vnd.code.notebook.error", + ); + } +} + +class NotebookCellOutput { + readonly items: NotebookCellOutputItem[]; + readonly metadata?: Record; + constructor( + items: NotebookCellOutputItem[], + metadata?: Record, + ) { + this.items = items; + this.metadata = metadata; + } +} + +const NotebookCellKind = { + Markup: 1, + Code: 2, +} as const; + +class NotebookEdit { + readonly range?: unknown; + readonly newCells?: unknown[]; + readonly index?: number; + readonly metadata?: Record; + readonly kind: + | "replaceCells" + | "insertCells" + | "deleteCells" + | "updateCellMetadata"; + + constructor( + range?: unknown, + newCells?: unknown[], + options?: { + index?: number; + metadata?: Record; + kind?: + | "replaceCells" + | "insertCells" + | "deleteCells" + | "updateCellMetadata"; + }, + ) { + this.range = range; + this.newCells = newCells; + this.index = options?.index; + this.metadata = options?.metadata; + this.kind = options?.kind ?? 
"replaceCells"; + } + + static replaceCells(range: unknown, newCells: unknown[]): NotebookEdit { + return new NotebookEdit(range, newCells, { kind: "replaceCells" }); + } + + static insertCells(index: number, newCells: unknown[]): NotebookEdit { + return new NotebookEdit(undefined, newCells, { + index, + kind: "insertCells", + }); + } + + static deleteCells(range: unknown): NotebookEdit { + return new NotebookEdit(range, undefined, { kind: "deleteCells" }); + } + + static updateCellMetadata( + index: number, + metadata: Record, + ): NotebookEdit { + return new NotebookEdit(undefined, undefined, { + index, + metadata, + kind: "updateCellMetadata", + }); + } +} + +class TabInputTextDiff { + readonly original: Uri; + readonly modified: Uri; + constructor(original: Uri, modified: Uri) { + this.original = original; + this.modified = modified; + } +} + +// Languages namespace (stub) +const languages = { + getDiagnostics(_resource?: unknown): unknown[] { + // Without args: return iterable of [Uri, Diagnostic[]] pairs + // With uri arg: return Diagnostic[] + return []; + }, + onDidChangeDiagnostics: new EventEmitter().event, + createDiagnosticCollection(_name?: string) { + const items = new Map(); + return { + name: _name ?? 
"", + set(uri: Uri, diagnostics: unknown[]) { + items.set(uri.toString(), diagnostics); + }, + delete(uri: Uri) { + items.delete(uri.toString()); + }, + clear() { + items.clear(); + }, + dispose() { + items.clear(); + }, + }; + }, + registerCodeLensProvider(_selector: unknown, _provider: unknown): Disposable { + return new Disposable(() => {}); + }, + registerHoverProvider(_selector: unknown, _provider: unknown): Disposable { + return new Disposable(() => {}); + }, + registerDefinitionProvider( + _selector: unknown, + _provider: unknown, + ): Disposable { + return new Disposable(() => {}); + }, + registerReferenceProvider( + _selector: unknown, + _provider: unknown, + ): Disposable { + return new Disposable(() => {}); + }, + registerDocumentSymbolProvider( + _selector: unknown, + _provider: unknown, + ): Disposable { + return new Disposable(() => {}); + }, + registerCompletionItemProvider( + _selector: unknown, + _provider: unknown, + ..._triggerCharacters: string[] + ): Disposable { + return new Disposable(() => {}); + }, +}; + +// Extensions namespace +const extensions = { + getExtension(extensionId: string): unknown { + try { + const { getLoadedExtension } = + require("./loader") as typeof import("./loader"); + const loaded = getLoadedExtension(extensionId); + if (loaded) { + return { + id: loaded.info.id, + extensionPath: loaded.info.extensionPath, + extensionUri: Uri.file(loaded.info.extensionPath), + isActive: loaded.info.isActive, + packageJSON: loaded.info.manifest, + exports: loaded.exports, + }; + } + } catch {} + return undefined; + }, + all: [] as unknown[], + onDidChange: new EventEmitter().event, +}; + +// Env namespace +const env = { + appName: "Visual Studio Code", + appRoot: process.cwd(), + appHost: "superset-desktop", + language: "en", + clipboard: { + async readText(): Promise { + try { + const { clipboard } = require("electron"); + return clipboard.readText(); + } catch { + return ""; + } + }, + async writeText(text: string): Promise { + try { 
+ const { clipboard } = require("electron"); + clipboard.writeText(text); + } catch {} + }, + }, + machineId: "superset-desktop", + sessionId: `session-${Date.now()}`, + uriScheme: "vscode", + shell: process.env.SHELL ?? "/bin/zsh", + get uiKind() { + return UIKind.Desktop; + }, + get logLevel() { + return LogLevel.Info; + }, + onDidChangeLogLevel: new EventEmitter().event, + remoteName: undefined as string | undefined, + isNewAppInstall: false, + isTelemetryEnabled: false, + onDidChangeTelemetryEnabled: new EventEmitter().event, + createTelemetryLogger(_sender: unknown, _options?: unknown) { + return { + logUsage() {}, + logError() {}, + dispose() {}, + onDidChangeEnableStates: new EventEmitter().event, + }; + }, + async openExternal(_target: Uri): Promise { + try { + const { shell } = require("electron"); + shell.openExternal(_target.toString()); + } catch {} + return true; + }, + async asExternalUri(uri: Uri): Promise { + return uri; + }, +}; + +// Authentication namespace +const authentication = { + getSession( + _providerId: string, + _scopes: string[], + _options?: unknown, + ): Promise { + return Promise.resolve(undefined); + }, + registerAuthenticationProvider( + _id: string, + _label: string, + _provider: unknown, + _options?: unknown, + ): Disposable { + return new Disposable(() => {}); + }, + onDidChangeSessions: new EventEmitter().event, +}; + +// l10n namespace +const l10n = { + t(message: string, ..._args: unknown[]): string { + return message; + }, + bundle: undefined as unknown, + uri: undefined as unknown, +}; + +// Build the vscode namespace +export function createVscodeApi(): Record { + const api: Record = { + // Module interop flags + __esModule: true, + + // VS Code version (extensions check this for feature availability) + version: "1.96.0", + + // Namespaces + commands, + workspace, + window, + languages, + extensions, + env, + authentication, + l10n, + + // Proposed API: chat sessions + chat: { + _providers: new Map(), + 
registerChatSessionItemProvider(id: string, provider: unknown) { + (this as { _providers: Map })._providers.set( + id, + provider, + ); + return new Disposable(() => { + (this as { _providers: Map })._providers.delete(id); + }); + }, + getSessionProvider(id: string): unknown { + return (this as { _providers: Map })._providers.get( + id, + ); + }, + }, + + // Proposed API: language model + lm: { + _models: [] as Array<{ + id: string; + vendor: string; + family: string; + version: string; + }>, + onDidChangeChatModels: new EventEmitter().event, + async selectChatModels(_selector?: { + vendor?: string; + family?: string; + id?: string; + }): Promise { + return []; + }, + async sendChatRequest( + _model: unknown, + _messages: unknown[], + _options?: unknown, + ): Promise { + throw new Error("Language model API not available in Superset Desktop"); + }, + getModelProxy: undefined as unknown, + isModelProxyAvailable: false, + }, + + // Classes + Uri, + Position, + Range, + Selection, + Disposable, + EventEmitter, + CancellationTokenSource, + ThemeColor, + ThemeIcon, + MarkdownString, + WorkspaceEdit, + TabInputText, + TabInputTextDiff, + NotebookCellOutputItem, + NotebookCellOutput, + NotebookCellKind, + NotebookEdit, + CodeLens, + + // Enums + StatusBarAlignment, + ViewColumn, + ProgressLocation, + ConfigurationTarget, + DiagnosticSeverity, + FileType, + EndOfLine, + OverviewRulerLane, + ExtensionMode, + TreeItemCollapsibleState, + TextEditorRevealType, + EnvironmentVariableMutatorType, + UIKind, + LogLevel, + ExtensionKind, + ColorThemeKind, + SymbolKind, + CompletionItemKind, + TextDocumentChangeReason, + QuickPickItemKind, + }; + + // Proxy logger: log access to unimplemented APIs + return new Proxy(api, { + get(target, prop, receiver) { + if (typeof prop === "string" && !(prop in target)) { + shimWarn(`[vscode-shim] Unimplemented API accessed: vscode.${prop}`); + return undefined; + } + return Reflect.get(target, prop, receiver); + }, + }); +} diff --git 
a/apps/desktop/src/main/lib/vscode-shim/webview-bridge.ts b/apps/desktop/src/main/lib/vscode-shim/webview-bridge.ts new file mode 100644 index 00000000000..12188418f2f --- /dev/null +++ b/apps/desktop/src/main/lib/vscode-shim/webview-bridge.ts @@ -0,0 +1,111 @@ +/** + * Webview bridge: manages communication between VS Code extension webviews + * and the Superset Desktop renderer process via an EventEmitter. + * + * The tRPC router subscribes to these events and forwards them to the renderer. + */ + +import { EventEmitter } from "node:events"; +import { shimLog, shimWarn } from "./api/debug-log"; +import { + getActiveView, + onWebviewEvent, + resolveWebviewView, + type WebviewEvent, + type WebviewInternal, +} from "./api/webview"; +import { clearWebviewHtml, setWebviewHtml } from "./api/webview-server"; + +export interface WebviewBridgeEvent { + type: "html" | "message" | "title" | "dispose" | "panel-created"; + viewId: string; + data: unknown; +} + +class WebviewBridge extends EventEmitter { + private _viewHtml = new Map(); + private _viewIds = new Map(); // viewType -> viewId + + constructor() { + super(); + // Listen for events from the webview shim + onWebviewEvent((event: WebviewEvent) => { + shimLog( + `[webview-bridge] Event: type=${event.type}, viewId=${event.viewId}, dataLen=${typeof event.data === "string" ? 
event.data.length : "N/A"}`, + ); + if (event.type === "html") { + this._viewHtml.set(event.viewId, event.data as string); + setWebviewHtml(event.viewId, event.data as string); + shimLog( + `[webview-bridge] Stored HTML for ${event.viewId}, htmlStore now has ${this._viewHtml.size} entries`, + ); + } + if (event.type === "dispose") { + this._viewHtml.delete(event.viewId); + clearWebviewHtml(event.viewId); + // Remove from viewType→viewId map + for (const [vt, vid] of this._viewIds) { + if (vid === event.viewId) { + this._viewIds.delete(vt); + break; + } + } + } + this.emit("webview-event", event); + }); + } + + /** Resolve a webview view (called when renderer requests a sidebar view) */ + resolveView(viewType: string, extensionPath: string): string | undefined { + shimLog(`[vscode-shim] WebviewBridge.resolveView called: ${viewType}`); + const result = resolveWebviewView(viewType, extensionPath); + if (!result) { + shimWarn( + `[vscode-shim] WebviewBridge.resolveView: no result for ${viewType}`, + ); + return undefined; + } + + const { viewId } = result; + this._viewIds.set(viewType, viewId); + return viewId; + } + + /** Get current HTML for a view */ + getHtml(viewId: string): string | undefined { + return this._viewHtml.get(viewId); + } + + /** Get all registered view types */ + getViewTypes(): string[] { + return [...this._viewIds.keys()]; + } + + /** Get viewId for a viewType */ + getViewId(viewType: string): string | undefined { + return this._viewIds.get(viewType); + } + + /** Send message from renderer to extension webview */ + postMessageToExtension(viewId: string, message: unknown): void { + const view = getActiveView(viewId); + if (view) { + (view.webview as WebviewInternal)._onDidReceiveMessage.fire(message); + } + } + + /** Subscribe to messages from extension to webview (postMessage calls) */ + subscribeToExtensionMessages( + viewId: string, + callback: (message: unknown) => void, + ): () => void { + const view = getActiveView(viewId); + if (!view) 
return () => {}; + const disposable = ( + view.webview as WebviewInternal + )._onDidPostMessage.event(callback); + return () => disposable.dispose(); + } +} + +export const webviewBridge = new WebviewBridge(); diff --git a/apps/desktop/src/main/lib/window-manager/index.ts b/apps/desktop/src/main/lib/window-manager/index.ts new file mode 100644 index 00000000000..ea9727bcc0f --- /dev/null +++ b/apps/desktop/src/main/lib/window-manager/index.ts @@ -0,0 +1,280 @@ +import { join } from "node:path"; +import { type BrowserWindow, ipcMain, nativeTheme } from "electron"; +import { createWindow } from "lib/electron-app/factories/windows/create"; +import { PLATFORM } from "shared/constants"; +import { appState } from "../app-state"; +import { + applyVibrancy, + DEFAULT_VIBRANCY_STATE, + getInitialWindowOptions as getInitialVibrancyOptions, +} from "../vibrancy"; + +interface TearoffWindowOptions { + windowId: string; + screenX: number; + screenY: number; + width?: number; + height?: number; +} + +interface TearoffTabData { + tab: unknown; + panes: Record<string, unknown>; + workspaceId: string; +} + +interface PendingAuthToken { + token: string; + expiresAt: string; +} + +type IpcHandler = { + attachWindow: (window: BrowserWindow) => void; + detachWindow: (window: BrowserWindow) => void; +}; + +export class WindowManager { + private windows = new Map<string, BrowserWindow>(); + private ipcHandler: IpcHandler | null = null; + private ipcRegistered = false; + private pendingTearoffData = new Map<string, TearoffTabData>(); + private pendingAuthTokens = new Map<string, PendingAuthToken | null>(); + + setIpcHandler(handler: IpcHandler): void { + this.ipcHandler = handler; + this.registerIpcHandlers(); + } + + private registerIpcHandlers(): void { + if (this.ipcRegistered) return; + this.ipcRegistered = true; + + // Synchronous IPC: preload fetches tearoff data before React starts + ipcMain.on("get-tearoff-data", (event, windowId: string) => { + const data = this.pendingTearoffData.get(windowId); + if (data) this.pendingTearoffData.delete(windowId); + event.returnValue = data
?? null; + }); + + // Synchronous IPC: preload fetches auth token for tearoff windows + ipcMain.on("get-tearoff-auth-token", (event, windowId: string) => { + const token = this.pendingAuthTokens.get(windowId); + if (token !== undefined) this.pendingAuthTokens.delete(windowId); + event.returnValue = token ?? null; + }); + + // Tearoff window closing: return all tabs to main window (single message) + ipcMain.on( + "tearoff-return-tabs", + ( + _event, + data: Array<{ tab: unknown; panes: Record }>, + ) => { + const mainWindow = this.getMain(); + if (mainWindow && !mainWindow.isDestroyed()) { + mainWindow.webContents.send("tearoff-tab-returned", data); + } else { + console.warn( + "[window-manager] Main window unavailable; returned tabs lost:", + data.length, + ); + } + }, + ); + } + + setPendingTearoffData(windowId: string, data: TearoffTabData): void { + this.pendingTearoffData.set(windowId, data); + setTimeout(() => this.pendingTearoffData.delete(windowId), 30_000); + } + + setPendingAuthToken(windowId: string, token: PendingAuthToken | null): void { + this.pendingAuthTokens.set(windowId, token); + setTimeout(() => this.pendingAuthTokens.delete(windowId), 30_000); + } + + register(windowId: string, window: BrowserWindow): void { + this.windows.set(windowId, window); + } + + unregister(windowId: string): void { + this.windows.delete(windowId); + } + + get(windowId: string): BrowserWindow | null { + return this.windows.get(windowId) ?? null; + } + + getMain(): BrowserWindow | null { + return this.windows.get("main") ?? 
null; + } + + shouldWindowIdOwnSingletonEffects(windowId: string | null): boolean { + if (!windowId || windowId === "main") { + return true; + } + + if (!this.windows.has(windowId)) { + return false; + } + + const mainWindow = this.getMain(); + if (mainWindow && !mainWindow.isDestroyed()) { + return false; + } + + const fallbackOwnerId = Array.from(this.windows.entries()) + .filter( + ([windowId, window]) => windowId !== "main" && !window.isDestroyed(), + ) + .map(([windowId]) => windowId) + .sort()[0]; + + return fallbackOwnerId === windowId; + } + + getAll(): Map { + return new Map(this.windows); + } + + createTearoffWindow(options: TearoffWindowOptions): { + windowId: string; + window: BrowserWindow; + } { + const { windowId } = options; + + const initialVibrancyState = + appState.data?.vibrancyState ?? DEFAULT_VIBRANCY_STATE; + const vibrancyWindowOptions = getInitialVibrancyOptions( + initialVibrancyState, + nativeTheme.shouldUseDarkColors, + ); + + const window = createWindow({ + id: "tearoff", + title: "Superset", + width: options.width ?? 900, + height: options.height ?? 
600, + x: Math.round(options.screenX - 100), + y: Math.round(options.screenY - 20), + minWidth: 400, + minHeight: 400, + show: false, + ...vibrancyWindowOptions, + frame: false, + titleBarStyle: "hidden", + trafficLightPosition: { x: 16, y: 16 }, + webPreferences: { + preload: join(__dirname, "../preload/index.js"), + webviewTag: true, + partition: "persist:superset", + additionalArguments: [`--tearoff-window-id=${windowId}`], + }, + }); + + this.register(windowId, window); + this.ipcHandler?.attachWindow(window); + + // Detach IPC BEFORE window is destroyed (close fires before closed) + window.on("close", () => { + this.ipcHandler?.detachWindow(window); + }); + window.on("closed", () => { + this.windows.delete(windowId); + }); + + // macOS Sequoia+: NSVisualEffectView can detach while the window is + // minimized in the Dock — the tearoff needs the same reshow guard as + // the main window or it restores opaque. + if (PLATFORM.IS_MAC) { + const reapplyVibrancyOnReshow = () => { + if (window.isDestroyed()) return; + applyVibrancy( + window, + appState.data?.vibrancyState ?? DEFAULT_VIBRANCY_STATE, + nativeTheme.shouldUseDarkColors, + ); + }; + window.on("restore", reapplyVibrancyOnReshow); + window.on("show", reapplyVibrancyOnReshow); + } + + window.webContents.once("did-finish-load", () => { + // Re-apply vibrancy now that the tearoff is on-screen so the + // native blur addon can find the NSVisualEffectView and write + // the user's persisted blurRadius. Without this the tearoff + // would stick to the default material blur until the user + // touched the vibrancy settings again. + applyVibrancy( + window, + appState.data?.vibrancyState ?? DEFAULT_VIBRANCY_STATE, + nativeTheme.shouldUseDarkColors, + ); + window.show(); + }); + + return { windowId, window }; + } + + /** + * Collect tabs from all open tearoff windows before the app quits. + * app.exit() bypasses beforeunload in renderers, so we must explicitly + * request state from each tearoff window via IPC. 
+ */ + async collectAllTearoffTabs( + timeoutMs = 1500, + ): Promise }>> { + const tearoffEntries = Array.from(this.windows.entries()).filter( + ([id, win]) => id !== "main" && !win.isDestroyed(), + ); + + if (tearoffEntries.length === 0) return []; + + const promises = tearoffEntries.map( + ([windowId, win]) => + new Promise }>>( + (resolve) => { + const timer = setTimeout(() => { + ipcMain.removeAllListeners(`tearoff-state-collected-${windowId}`); + resolve([]); + }, timeoutMs); + + ipcMain.once( + `tearoff-state-collected-${windowId}`, + ( + _event, + data: Array<{ + tab: unknown; + panes: Record; + }>, + ) => { + clearTimeout(timer); + resolve(Array.isArray(data) ? data : []); + }, + ); + + if (!win.isDestroyed()) { + win.webContents.send("collect-tearoff-state", windowId); + } else { + clearTimeout(timer); + ipcMain.removeAllListeners(`tearoff-state-collected-${windowId}`); + resolve([]); + } + }, + ), + ); + + const results = await Promise.all(promises); + return results.flat(); + } + + broadcast(channel: string, ...args: unknown[]): void { + for (const window of this.windows.values()) { + if (!window.isDestroyed()) { + window.webContents.send(channel, ...args); + } + } + } +} + +export const windowManager = new WindowManager(); diff --git a/apps/desktop/src/main/lib/window-state/bounds-validation.test.ts b/apps/desktop/src/main/lib/window-state/bounds-validation.test.ts index 9bba586c621..687e47eeb39 100644 --- a/apps/desktop/src/main/lib/window-state/bounds-validation.test.ts +++ b/apps/desktop/src/main/lib/window-state/bounds-validation.test.ts @@ -302,6 +302,28 @@ describe("getInitialWindowBounds", () => { }); }); + describe("position restore disabled", () => { + it("should center while preserving saved size when position restore is disabled", () => { + const result = getInitialWindowBounds( + { + x: 100, + y: 200, + width: 800, + height: 600, + isMaximized: false, + }, + { restorePosition: false }, + ); + + expect(result).toEqual({ + width: 800, + 
height: 600, + center: true, + isMaximized: false, + }); + }); + }); + describe("dimension clamping", () => { it("should clamp width to work area size", () => { const result = getInitialWindowBounds({ diff --git a/apps/desktop/src/main/lib/window-state/bounds-validation.ts b/apps/desktop/src/main/lib/window-state/bounds-validation.ts index fa70718d97b..4d291dfbf85 100644 --- a/apps/desktop/src/main/lib/window-state/bounds-validation.ts +++ b/apps/desktop/src/main/lib/window-state/bounds-validation.ts @@ -77,6 +77,10 @@ export interface InitialWindowBounds { isMaximized: boolean; } +interface GetInitialWindowBoundsOptions { + restorePosition?: boolean; +} + /** * Computes initial window bounds from saved state, with fallbacks. * @@ -86,6 +90,7 @@ export interface InitialWindowBounds { */ export function getInitialWindowBounds( savedState: WindowState | null, + options: GetInitialWindowBoundsOptions = {}, ): InitialWindowBounds { const { workAreaSize } = getScreen().getPrimaryDisplay(); @@ -104,6 +109,15 @@ export function getInitialWindowBounds( savedState.height, ); + if (options.restorePosition === false) { + return { + width, + height, + center: true, + isMaximized: savedState.isMaximized, + }; + } + const savedBounds: Rectangle = { x: savedState.x, y: savedState.y, diff --git a/apps/desktop/src/main/lib/window-state/index.ts b/apps/desktop/src/main/lib/window-state/index.ts index dabdb477f1b..3ff0f897042 100644 --- a/apps/desktop/src/main/lib/window-state/index.ts +++ b/apps/desktop/src/main/lib/window-state/index.ts @@ -3,6 +3,10 @@ export { type InitialWindowBounds, isVisibleOnAnyDisplay, } from "./bounds-validation"; +export { + isWindowPositionPersistenceEnabled, + setWindowStateEnvironmentForTesting, +} from "./position-persistence"; export { isValidWindowState, loadWindowState, diff --git a/apps/desktop/src/main/lib/window-state/position-persistence.test.ts b/apps/desktop/src/main/lib/window-state/position-persistence.test.ts new file mode 100644 index 
00000000000..dc38fe19db7 --- /dev/null +++ b/apps/desktop/src/main/lib/window-state/position-persistence.test.ts @@ -0,0 +1,55 @@ +import { afterEach, describe, expect, it } from "bun:test"; +import { + isWindowPositionPersistenceEnabled, + setWindowStateEnvironmentForTesting, +} from "./position-persistence"; + +afterEach(() => { + setWindowStateEnvironmentForTesting(null); +}); + +describe("isWindowPositionPersistenceEnabled", () => { + it("should disable position persistence on Linux Wayland", () => { + setWindowStateEnvironmentForTesting({ + platform: "linux", + env: { + XDG_SESSION_TYPE: "wayland", + }, + }); + + expect(isWindowPositionPersistenceEnabled()).toBe(false); + }); + + it("should disable position persistence when WAYLAND_DISPLAY is set", () => { + setWindowStateEnvironmentForTesting({ + platform: "linux", + env: { + WAYLAND_DISPLAY: "wayland-1", + }, + }); + + expect(isWindowPositionPersistenceEnabled()).toBe(false); + }); + + it("should keep position persistence on Linux X11", () => { + setWindowStateEnvironmentForTesting({ + platform: "linux", + env: { + XDG_SESSION_TYPE: "x11", + }, + }); + + expect(isWindowPositionPersistenceEnabled()).toBe(true); + }); + + it("should keep position persistence on non-Linux platforms", () => { + setWindowStateEnvironmentForTesting({ + platform: "darwin", + env: { + XDG_SESSION_TYPE: "wayland", + }, + }); + + expect(isWindowPositionPersistenceEnabled()).toBe(true); + }); +}); diff --git a/apps/desktop/src/main/lib/window-state/position-persistence.ts b/apps/desktop/src/main/lib/window-state/position-persistence.ts new file mode 100644 index 00000000000..255bd113308 --- /dev/null +++ b/apps/desktop/src/main/lib/window-state/position-persistence.ts @@ -0,0 +1,27 @@ +let platformOverride: NodeJS.Platform | null = null; +let envOverride: NodeJS.ProcessEnv | null = null; + +function getPlatform(): NodeJS.Platform { + return platformOverride ?? 
process.platform; +} + +function getEnv(): NodeJS.ProcessEnv { + return envOverride ?? process.env; +} + +export function setWindowStateEnvironmentForTesting( + override: { + platform?: NodeJS.Platform; + env?: NodeJS.ProcessEnv; + } | null, +): void { + platformOverride = override?.platform ?? null; + envOverride = override?.env ?? null; +} + +export function isWindowPositionPersistenceEnabled(): boolean { + if (getPlatform() !== "linux") return true; + + const env = getEnv(); + return env.XDG_SESSION_TYPE !== "wayland" && !env.WAYLAND_DISPLAY; +} diff --git a/apps/desktop/src/main/lib/workspace-media-protocol.ts b/apps/desktop/src/main/lib/workspace-media-protocol.ts new file mode 100644 index 00000000000..ea8874d737a --- /dev/null +++ b/apps/desktop/src/main/lib/workspace-media-protocol.ts @@ -0,0 +1,95 @@ +import { realpath } from "node:fs/promises"; +import { resolve, sep } from "node:path"; +import { projects, worktrees } from "@superset/local-db"; +import { + createFileProtocolResponse, + getMediaMimeType, + isSupportedMediaFile, +} from "./file-streaming"; +import { localDb } from "./local-db"; + +function decodePathFromUrl(url: URL): string | null { + // URL format: superset-workspace-media:///<encoded absolute path> + const raw = url.pathname.replace(/^\/+/, ""); + if (!raw) return null; + try { + const decoded = decodeURIComponent(raw); + if (decoded.startsWith("/") || /^[a-zA-Z]:[\\/]/.test(decoded)) { + return decoded; + } + return null; + } catch { + return null; + } +} + +export function encodeWorkspaceMediaUrl(absolutePath: string): string { + return `superset-workspace-media:///${encodeURIComponent(absolutePath)}`; +} + +/** + * Return the set of allowed workspace root paths (project main repos + worktrees), + * each fully resolved. Paths are resolved fresh per-request so new workspaces + * become available without an app restart.
+ */ +async function loadAllowedRoots(): Promise<string[]> { + const rawRoots = [ + ...localDb.select({ path: projects.mainRepoPath }).from(projects).all(), + ...localDb.select({ path: worktrees.path }).from(worktrees).all(), + ] + .map((row) => row.path) + .filter( + (path): path is string => typeof path === "string" && path.length > 0, + ); + + const resolved = await Promise.all( + rawRoots.map(async (p) => { + try { + return await realpath(p); + } catch { + return resolve(p); + } + }), + ); + return Array.from(new Set(resolved)); +} + +function isWithinRoot(resolvedPath: string, root: string): boolean { + if (resolvedPath === root) return true; + const rootWithSep = root.endsWith(sep) ? root : root + sep; + return resolvedPath.startsWith(rootWithSep); +} + +export function createWorkspaceMediaProtocolHandler() { + return async (request: Request): Promise<Response> => { + const url = new URL(request.url); + const requestedPath = decodePathFromUrl(url); + if (!requestedPath) { + return new Response("Bad request", { status: 400 }); + } + + if (!isSupportedMediaFile(requestedPath)) { + return new Response("Forbidden", { status: 403 }); + } + + let resolvedPath: string; + try { + resolvedPath = await realpath(requestedPath); + } catch { + return new Response("Not found", { status: 404 }); + } + + const roots = await loadAllowedRoots(); + const withinWorkspace = roots.some((root) => + isWithinRoot(resolvedPath, root), + ); + if (!withinWorkspace) { + return new Response("Forbidden", { status: 403 }); + } + + return createFileProtocolResponse(request, resolvedPath, { + contentType: getMediaMimeType(resolvedPath) ?? "application/octet-stream", + cacheControl: "no-store", + }); + }; +} diff --git a/apps/desktop/src/main/lib/workspace-runtime/types.ts b/apps/desktop/src/main/lib/workspace-runtime/types.ts index 75a6da462ea..654827a893f 100644 --- a/apps/desktop/src/main/lib/workspace-runtime/types.ts +++ b/apps/desktop/src/main/lib/workspace-runtime/types.ts @@ -9,8 +9,9 @@ * 1.
Stream subscriptions MUST NOT complete on session exit (exit is a state transition) * 2. Capability presence (e.g., management !== null) indicates feature availability, * not "health right now"; mid-session failures should propagate as errors - * 3. Operations use sync signatures where latency-critical (write, resize, signal, detach); - * async signatures for lifecycle ops (createOrAttach, kill, cleanup) + * 3. Operations stay sync where latency-critical by default (resize, signal, detach). + * `write` can opt into an ack-backed async path for critical one-shot commands; + * lifecycle ops remain async (createOrAttach, kill, cleanup). * * Reference: apps/desktop/plans/20260109-2313-terminal-runtime-abstraction-rewrite.md */ @@ -79,8 +80,18 @@ export interface TerminalSessionOperations { /** Cancel the current createOrAttach attempt for a pane if it matches requestId. */ cancelCreateOrAttach(params: { paneId: string; requestId: string }): void; - /** Write data to the terminal */ - write(params: { paneId: string; data: string }): void; + /** + * Write data to the terminal. + * `requireAck` opts into a confirmed async write for critical one-shot commands. + * `interactive` marks writes from direct user keyboard input; they bypass the + * shell-ready queue so prompts during initialization (e.g. oh-my-zsh update) work. 
+ */ + write(params: { + paneId: string; + data: string; + requireAck?: boolean; + interactive?: boolean; + }): void | Promise; /** Resize the terminal */ resize(params: { paneId: string; cols: number; rows: number }): void; diff --git a/apps/desktop/src/main/lib/youtube-ringtone.ts b/apps/desktop/src/main/lib/youtube-ringtone.ts new file mode 100644 index 00000000000..15e853966bc --- /dev/null +++ b/apps/desktop/src/main/lib/youtube-ringtone.ts @@ -0,0 +1,782 @@ +import { spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { EventEmitter } from "node:events"; +import { + existsSync, + constants as fsConstants, + mkdirSync, + readdirSync, + statSync, +} from "node:fs"; +import { access, rm } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { delimiter, dirname, extname, join } from "node:path"; +import { getProcessEnvWithShellPath } from "lib/trpc/routers/workspaces/utils/shell-env"; +import { + type CustomRingtoneInfo, + importCustomRingtoneFromPath, + saveCustomRingtoneSource, + setCustomRingtoneDisplayName, + updateCustomRingtoneEditState, +} from "./custom-ringtones"; +import { + getTempAudioPath, + registerTempAudio, + unregisterTempAudio, +} from "./temp-audio-protocol"; + +const MAX_CLIP_DURATION_SECONDS = 30; +const YT_DLP_TIMEOUT_MS = 120_000; +const FULL_DOWNLOAD_TIMEOUT_MS = 300_000; +const MAX_FULL_DOWNLOAD_DURATION_SECONDS = 600; +const REQUIRED_BINARIES = ["yt-dlp", "ffmpeg", "ffprobe"] as const; +type RequiredBinary = (typeof REQUIRED_BINARIES)[number]; +const ALLOWED_OUTPUT_EXTENSIONS = new Set([ + ".mp3", + ".wav", + ".ogg", + ".m4a", + ".aac", + ".opus", + ".webm", +]); + +const FALLBACK_SEARCH_DIRS = [ + "/opt/homebrew/bin", + "/opt/homebrew/sbin", + "/usr/local/bin", + "/usr/local/sbin", + "/usr/bin", + "/usr/sbin", + "/bin", + "/sbin", +]; + +export interface VideoInfo { + title: string; + thumbnailUrl: string; + durationSeconds: number; +} + +export interface DownloadedAudio { + tempId: 
string; + tempPath: string; + info: VideoInfo; +} + +export interface ImportFromYouTubeOptions { + url: string; + startSeconds: number; + endSeconds: number; + displayName?: string; + thumbnailUrl?: string; + fadeInSeconds?: number; + fadeOutSeconds?: number; + playbackRate?: number; + /** If provided, skip yt-dlp download and use this temp file instead */ + tempFilePath?: string; + /** Original video title (YouTube). Stored so re-edit can show it in the UI. */ + sourceTitle?: string; +} + +export class YouTubeRingtoneError extends Error { + constructor( + message: string, + public readonly code: + | "BINARY_MISSING" + | "INVALID_URL" + | "INVALID_RANGE" + | "DOWNLOAD_FAILED" + | "TIMEOUT" + | "VIDEO_TOO_LONG", + ) { + super(message); + this.name = "YouTubeRingtoneError"; + } +} + +const YOUTUBE_URL_PATTERN = + /^https?:\/\/(?:www\.|m\.|music\.)?(?:youtube\.com\/(?:watch\?v=|shorts\/|embed\/|live\/)[\w-]+|youtu\.be\/[\w-]+)/i; + +export function isLikelyYouTubeUrl(url: string): boolean { + return YOUTUBE_URL_PATTERN.test(url.trim()); +} + +async function isExecutable(path: string): Promise { + try { + await access(path, fsConstants.X_OK); + return statSync(path).isFile(); + } catch { + return false; + } +} + +async function resolveBinaryPath( + binary: string, + env: NodeJS.ProcessEnv, +): Promise { + const searchDirs = new Set(); + const pathEnv = env.PATH ?? env.Path ?? 
""; + for (const dir of pathEnv.split(delimiter)) { + if (dir) searchDirs.add(dir); + } + if (process.platform === "darwin" || process.platform === "linux") { + for (const dir of FALLBACK_SEARCH_DIRS) searchDirs.add(dir); + } + + for (const dir of searchDirs) { + const candidate = join(dir, binary); + if (await isExecutable(candidate)) { + return candidate; + } + } + return null; +} + +async function resolveRequiredBinaries( + env: NodeJS.ProcessEnv, +): Promise> { + const entries = await Promise.all( + REQUIRED_BINARIES.map(async (bin) => { + const path = await resolveBinaryPath(bin, env); + return [bin, path] as const; + }), + ); + + const missing = entries.filter(([, p]) => !p).map(([name]) => name); + if (missing.length > 0) { + const brewTargets = + missing.filter((b) => b !== "ffprobe").join(" ") || "yt-dlp ffmpeg"; + throw new YouTubeRingtoneError( + `Missing required tool(s): ${missing.join(", ")}. Install with \`brew install ${brewTargets}\` (macOS, ffprobe ships with ffmpeg) or your platform's package manager. 
If already installed, make sure it is on your login-shell PATH.`, + "BINARY_MISSING", + ); + } + + const resolved = Object.fromEntries(entries) as Record< + RequiredBinary, + string + >; + return resolved; +} + +export async function checkMissingBinaries(): Promise { + const shellEnv = await getProcessEnvWithShellPath(); + const entries = await Promise.all( + REQUIRED_BINARIES.map(async (bin) => { + const path = await resolveBinaryPath(bin, shellEnv); + return [bin, path] as const; + }), + ); + return entries.filter(([, p]) => !p).map(([name]) => name); +} + +interface InstallEventBase { + installId: string; + seq: number; + time: number; +} + +export type InstallProgressEvent = + | (InstallEventBase & { + type: "log"; + message: string; + level: "info" | "warn" | "error"; + stream: "stdout" | "stderr" | "system"; + }) + | (InstallEventBase & { type: "done" }) + | (InstallEventBase & { type: "error"; message: string }); + +const installEventBus = new EventEmitter(); +installEventBus.setMaxListeners(0); + +const installEventBuffers = new Map(); +const installBufferEvictTimers = new Map(); +const installSeqCounters = new Map(); +const INSTALL_MAX_BUFFERED_EVENTS = 1000; +const INSTALL_TERMINAL_EVICT_MS = 30_000; + +function isTerminalInstallEvent(event: InstallProgressEvent): boolean { + return event.type === "done" || event.type === "error"; +} + +function nextInstallSeq(installId: string): number { + const next = (installSeqCounters.get(installId) ?? 0) + 1; + installSeqCounters.set(installId, next); + return next; +} + +type DistributiveOmit< + T, + K extends keyof InstallProgressEvent, +> = T extends unknown ? 
Omit : never; +type InstallEventInput = DistributiveOmit; + +function emitInstallEvent(input: InstallEventInput): void { + const event = { + ...input, + seq: nextInstallSeq(input.installId), + } as InstallProgressEvent; + let buffer = installEventBuffers.get(event.installId); + if (!buffer) { + buffer = []; + installEventBuffers.set(event.installId, buffer); + } + buffer.push(event); + if (buffer.length > INSTALL_MAX_BUFFERED_EVENTS) { + buffer.splice(0, buffer.length - INSTALL_MAX_BUFFERED_EVENTS); + } + installEventBus.emit(event.installId, event); + + if (isTerminalInstallEvent(event)) { + const existing = installBufferEvictTimers.get(event.installId); + if (existing) clearTimeout(existing); + const timer = setTimeout(() => { + installEventBuffers.delete(event.installId); + installBufferEvictTimers.delete(event.installId); + installSeqCounters.delete(event.installId); + }, INSTALL_TERMINAL_EVICT_MS); + installBufferEvictTimers.set(event.installId, timer); + } +} + +function emitInstallLog( + installId: string, + message: string, + level: "info" | "warn" | "error", + stream: "stdout" | "stderr" | "system", +): void { + for (const line of message.split(/\r?\n/)) { + const trimmed = line.trimEnd(); + if (!trimmed) continue; + emitInstallEvent({ + type: "log", + installId, + time: Date.now(), + message: trimmed, + level, + stream, + }); + } +} + +export function subscribeInstallEvents( + installId: string, + listener: (event: InstallProgressEvent) => void, +): () => void { + installEventBus.on(installId, listener); + return () => installEventBus.off(installId, listener); +} + +export function getBufferedInstallEvents( + installId: string, +): InstallProgressEvent[] { + return installEventBuffers.get(installId)?.slice() ?? []; +} + +export async function installMissingBinaries(installId: string): Promise { + if (process.platform !== "darwin") { + const msg = + "Auto-install is only supported on macOS. 
Please install yt-dlp and ffmpeg manually."; + emitInstallLog(installId, msg, "error", "system"); + emitInstallEvent({ + type: "error", + installId, + time: Date.now(), + message: msg, + }); + throw new Error(msg); + } + + emitInstallLog(installId, "Resolving Homebrew path…", "info", "system"); + const shellEnv = await getProcessEnvWithShellPath(); + const brewPath = await resolveBinaryPath("brew", shellEnv); + if (!brewPath) { + const msg = + "Homebrew is not installed. Please install it from https://brew.sh and then install yt-dlp and ffmpeg."; + emitInstallLog(installId, msg, "error", "system"); + emitInstallEvent({ + type: "error", + installId, + time: Date.now(), + message: msg, + }); + throw new Error(msg); + } + + emitInstallLog( + installId, + `$ ${brewPath} install yt-dlp ffmpeg`, + "info", + "system", + ); + + try { + await new Promise((resolve, reject) => { + const proc = spawn(brewPath, ["install", "yt-dlp", "ffmpeg"], { + env: shellEnv, + stdio: ["ignore", "pipe", "pipe"], + }); + + const timer = setTimeout(() => { + proc.kill("SIGKILL"); + reject(new Error("Installation timed out after 10 minutes.")); + }, 600_000); + + let stderrTail = ""; + proc.stdout?.on("data", (chunk: Buffer) => { + emitInstallLog(installId, chunk.toString(), "info", "stdout"); + }); + proc.stderr?.on("data", (chunk: Buffer) => { + const text = chunk.toString(); + stderrTail = (stderrTail + text).split("\n").slice(-20).join("\n"); + // brew writes progress banners to stderr; show as info by default + emitInstallLog(installId, text, "info", "stderr"); + }); + + proc.on("error", (err) => { + clearTimeout(timer); + reject(new Error(`Failed to run brew: ${err.message}`)); + }); + + proc.on("exit", (code) => { + clearTimeout(timer); + if (code === 0) { + resolve(); + } else { + const msg = stderrTail.trim().split("\n").slice(-3).join("\n"); + reject( + new Error(msg || `brew install exited with code ${code ?? 
"?"}`), + ); + } + }); + }); + + emitInstallLog(installId, "Installation complete.", "info", "system"); + emitInstallEvent({ type: "done", installId, time: Date.now() }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + emitInstallLog(installId, message, "error", "system"); + emitInstallEvent({ + type: "error", + installId, + time: Date.now(), + message, + }); + throw err; + } +} + +function runProcess( + binaryPath: string, + args: string[], + cwd: string, + env: NodeJS.ProcessEnv, + timeoutMs: number, +): Promise { + return new Promise((resolve, reject) => { + const proc = spawn(binaryPath, args, { + cwd, + env, + stdio: ["ignore", "pipe", "pipe"], + }); + + let stdout = ""; + let stderr = ""; + proc.stdout?.on("data", (chunk: Buffer) => { + stdout += chunk.toString(); + }); + proc.stderr?.on("data", (chunk: Buffer) => { + stderr += chunk.toString(); + }); + + const timer = setTimeout(() => { + proc.kill("SIGKILL"); + reject(new YouTubeRingtoneError("Process timed out.", "TIMEOUT")); + }, timeoutMs); + + proc.on("error", (error) => { + clearTimeout(timer); + reject( + new YouTubeRingtoneError( + `Failed to launch process: ${error.message}`, + "DOWNLOAD_FAILED", + ), + ); + }); + + proc.on("exit", (code) => { + clearTimeout(timer); + if (code === 0) { + resolve(stdout); + } else { + const trimmed = stderr.trim().split("\n").slice(-3).join("\n"); + reject( + new YouTubeRingtoneError( + trimmed || `Process exited with code ${code ?? 
"?"}`, + "DOWNLOAD_FAILED", + ), + ); + } + }); + }); +} + +function findProducedAudio(workDir: string): string | null { + if (!existsSync(workDir)) return null; + const candidates = readdirSync(workDir) + .filter((name) => + ALLOWED_OUTPUT_EXTENSIONS.has(extname(name).toLowerCase()), + ) + .map((name) => join(workDir, name)) + .filter((p) => { + try { + return statSync(p).isFile() && statSync(p).size > 0; + } catch { + return false; + } + }); + + if (candidates.length === 0) return null; + candidates.sort((a, b) => statSync(b).mtimeMs - statSync(a).mtimeMs); + return candidates[0] ?? null; +} + +export async function fetchYouTubeVideoInfo(url: string): Promise { + const trimmedUrl = url.trim(); + if (!isLikelyYouTubeUrl(trimmedUrl)) { + throw new YouTubeRingtoneError( + "Please enter a valid YouTube URL (youtube.com or youtu.be).", + "INVALID_URL", + ); + } + + const shellEnv = await getProcessEnvWithShellPath(); + const resolved = await resolveRequiredBinaries(shellEnv); + + const workDir = join(tmpdir(), `superset-ytinfo-${randomUUID()}`); + mkdirSync(workDir, { recursive: true }); + + try { + const jsonOutput = await runProcess( + resolved["yt-dlp"], + ["--dump-single-json", "--no-playlist", "--no-warnings", trimmedUrl], + workDir, + shellEnv, + YT_DLP_TIMEOUT_MS, + ); + + const data = JSON.parse(jsonOutput) as { + title?: string; + duration?: number; + thumbnail?: string; + }; + + return { + title: data.title?.trim() || "YouTube Video", + thumbnailUrl: data.thumbnail || "", + durationSeconds: data.duration ?? 
0, + }; + } finally { + await rm(workDir, { recursive: true, force: true }).catch(() => {}); + } +} + +export async function downloadFullYouTubeAudio( + url: string, +): Promise { + const trimmedUrl = url.trim(); + if (!isLikelyYouTubeUrl(trimmedUrl)) { + throw new YouTubeRingtoneError( + "Please enter a valid YouTube URL (youtube.com or youtu.be).", + "INVALID_URL", + ); + } + + const shellEnv = await getProcessEnvWithShellPath(); + const resolved = await resolveRequiredBinaries(shellEnv); + + const ffmpegDir = dirname(resolved.ffmpeg); + const existingPath = shellEnv.PATH ?? shellEnv.Path ?? ""; + const pathEntries = existingPath.split(delimiter).filter(Boolean); + if (!pathEntries.includes(ffmpegDir)) { + pathEntries.unshift(ffmpegDir); + } + const spawnEnv: NodeJS.ProcessEnv = { + ...shellEnv, + PATH: pathEntries.join(delimiter), + }; + + const workDir = join(tmpdir(), `superset-ytfull-${randomUUID()}`); + mkdirSync(workDir, { recursive: true }); + const outputTemplate = join(workDir, "audio.%(ext)s"); + + // Single yt-dlp invocation: fetch metadata AND download the audio-only + // stream. Skipping `-x --audio-format mp3` avoids a multi-second ffmpeg + // re-encode — the browser and ffmpeg both decode m4a/webm natively, and + // we can re-encode to mp3 only at the final import step. + const args = [ + "--no-playlist", + "--no-warnings", + "--match-filter", + `duration <= ${MAX_FULL_DOWNLOAD_DURATION_SECONDS}`, + "-f", + "bestaudio[ext=m4a]/bestaudio[ext=webm]/bestaudio", + "--concurrent-fragments", + "5", + "--ffmpeg-location", + ffmpegDir, + "--print-json", + "--no-simulate", + "-o", + outputTemplate, + trimmedUrl, + ]; + + let info: VideoInfo; + try { + const jsonOutput = await runProcess( + resolved["yt-dlp"], + args, + workDir, + spawnEnv, + FULL_DOWNLOAD_TIMEOUT_MS, + ); + // --print-json can emit multiple JSON lines; find the one with title. 
+ const lastJsonLine = jsonOutput + .split("\n") + .map((l) => l.trim()) + .filter((l) => l.startsWith("{") && l.endsWith("}")) + .pop(); + const data = lastJsonLine + ? (JSON.parse(lastJsonLine) as { + title?: string; + duration?: number; + thumbnail?: string; + }) + : {}; + info = { + title: data.title?.trim() || "YouTube Video", + thumbnailUrl: data.thumbnail || "", + durationSeconds: data.duration ?? 0, + }; + } catch (err) { + await rm(workDir, { recursive: true, force: true }).catch(() => {}); + if (err instanceof YouTubeRingtoneError && err.code === "DOWNLOAD_FAILED") { + const msg = err.message.toLowerCase(); + if (msg.includes("does not pass filter") || msg.includes("duration")) { + throw new YouTubeRingtoneError( + `Video is too long. Maximum supported duration is ${MAX_FULL_DOWNLOAD_DURATION_SECONDS / 60} minutes.`, + "VIDEO_TOO_LONG", + ); + } + } + throw err; + } + + const producedPath = findProducedAudio(workDir); + if (!producedPath) { + await rm(workDir, { recursive: true, force: true }).catch(() => {}); + throw new YouTubeRingtoneError( + "yt-dlp did not produce an audio file. 
The video may be unavailable or restricted.", + "DOWNLOAD_FAILED", + ); + } + + const tempId = randomUUID(); + registerTempAudio(tempId, producedPath); + + return { tempId, tempPath: producedPath, info }; +} + +export async function cleanupTempAudio(tempId: string): Promise { + const filePath = getTempAudioPath(tempId); + unregisterTempAudio(tempId); + if (filePath) { + const dir = dirname(filePath); + await rm(dir, { recursive: true, force: true }).catch(() => {}); + } +} + +export async function importRingtoneFromYouTube( + options: ImportFromYouTubeOptions, +): Promise { + const url = options.url.trim(); + + if (!options.tempFilePath && !isLikelyYouTubeUrl(url)) { + throw new YouTubeRingtoneError( + "Please enter a valid YouTube URL (youtube.com or youtu.be).", + "INVALID_URL", + ); + } + + const startSeconds = Math.max(0, options.startSeconds); + const endSeconds = options.endSeconds; + const playbackRate = Math.max( + 0.5, + Math.min(2.0, options.playbackRate ?? 1.0), + ); + const rawDuration = endSeconds - startSeconds; + const outputDuration = rawDuration / playbackRate; + + if (!Number.isFinite(rawDuration) || rawDuration <= 0) { + throw new YouTubeRingtoneError( + "End time must be greater than start time.", + "INVALID_RANGE", + ); + } + + if (outputDuration > MAX_CLIP_DURATION_SECONDS) { + throw new YouTubeRingtoneError( + `Output clip duration (${outputDuration.toFixed(1)}s) exceeds the maximum of ${MAX_CLIP_DURATION_SECONDS} seconds.`, + "INVALID_RANGE", + ); + } + + const shellEnv = await getProcessEnvWithShellPath(); + const resolved = await resolveRequiredBinaries(shellEnv); + + const ffmpegDir = dirname(resolved.ffmpeg); + const existingPath = shellEnv.PATH ?? shellEnv.Path ?? 
""; + const pathEntries = existingPath.split(delimiter).filter(Boolean); + if (!pathEntries.includes(ffmpegDir)) { + pathEntries.unshift(ffmpegDir); + } + const spawnEnv: NodeJS.ProcessEnv = { + ...shellEnv, + PATH: pathEntries.join(delimiter), + }; + + const workDir = join(tmpdir(), `superset-yt-${randomUUID()}`); + mkdirSync(workDir, { recursive: true }); + + try { + let inputPath: string; + + if (options.tempFilePath) { + inputPath = options.tempFilePath; + } else { + // Legacy: download section via yt-dlp + const outputTemplate = join(workDir, "clip.%(ext)s"); + + const sectionSpec = `*${startSeconds}-${endSeconds}`; + const ytArgs = [ + "--no-playlist", + "--no-warnings", + "--quiet", + "-x", + "--audio-format", + "mp3", + "--audio-quality", + "5", + "--download-sections", + sectionSpec, + "--force-keyframes-at-cuts", + "--ffmpeg-location", + ffmpegDir, + "-o", + outputTemplate, + url, + ]; + + await runProcess( + resolved["yt-dlp"], + ytArgs, + workDir, + spawnEnv, + YT_DLP_TIMEOUT_MS, + ); + + const downloaded = findProducedAudio(workDir); + if (!downloaded) { + throw new YouTubeRingtoneError( + "yt-dlp did not produce an audio file. The video may be unavailable or restricted.", + "DOWNLOAD_FAILED", + ); + } + inputPath = downloaded; + } + + // Build ffmpeg filter chain + const filters: string[] = []; + if (playbackRate !== 1.0) { + filters.push(`atempo=${playbackRate.toFixed(3)}`); + } + const fadeIn = options.fadeInSeconds ?? 0; + const fadeOut = options.fadeOutSeconds ?? 
0; + if (fadeIn > 0) { + filters.push(`afade=t=in:st=0:d=${fadeIn.toFixed(3)}`); + } + if (fadeOut > 0) { + const fadeOutStart = Math.max(0, outputDuration - fadeOut); + filters.push( + `afade=t=out:st=${fadeOutStart.toFixed(3)}:d=${fadeOut.toFixed(3)}`, + ); + } + + const outputPath = join(workDir, `output_${randomUUID()}.mp3`); + const ffmpegArgs: string[] = []; + + if (options.tempFilePath) { + // Input-side seek: ffmpeg is fast AND frame-accurate, and it skips + // MP3 decoder priming so the cut lines up with the browser preview. + ffmpegArgs.push("-ss", startSeconds.toFixed(3)); + } + + ffmpegArgs.push("-i", inputPath); + + if (options.tempFilePath) { + ffmpegArgs.push("-t", rawDuration.toFixed(3)); + } + + if (filters.length > 0) { + ffmpegArgs.push("-af", filters.join(",")); + } + + ffmpegArgs.push("-acodec", "libmp3lame", "-q:a", "5", "-y", outputPath); + + await runProcess( + resolved.ffmpeg, + ffmpegArgs, + workDir, + spawnEnv, + YT_DLP_TIMEOUT_MS, + ); + + const result = await importCustomRingtoneFromPath(outputPath, { + displayName: options.displayName?.trim() || undefined, + thumbnailUrl: options.thumbnailUrl, + }); + + // Persist the source audio + edit parameters so the user can re-open + // the clip editor later and adjust the trim/fade/speed without + // re-downloading from YouTube. 
+ if (options.tempFilePath) { + try { + await saveCustomRingtoneSource(options.tempFilePath); + updateCustomRingtoneEditState({ + startSeconds, + endSeconds, + fadeInSeconds: options.fadeInSeconds, + fadeOutSeconds: options.fadeOutSeconds, + playbackRate, + sourceTitle: options.sourceTitle, + sourceUrl: options.url, + }); + } catch (err) { + console.error("Failed to persist ringtone source for re-edit:", err); + } + } + + return result; + } finally { + await rm(workDir, { recursive: true, force: true }).catch(() => {}); + } +} + +export { setCustomRingtoneDisplayName }; + +export const YOUTUBE_RINGTONE_LIMITS = { + maxDurationSeconds: MAX_CLIP_DURATION_SECONDS, + maxFullDownloadDurationSeconds: MAX_FULL_DOWNLOAD_DURATION_SECONDS, +}; diff --git a/apps/desktop/src/main/terminal-host/debug.ts b/apps/desktop/src/main/terminal-host/debug.ts new file mode 100644 index 00000000000..2cb81484504 --- /dev/null +++ b/apps/desktop/src/main/terminal-host/debug.ts @@ -0,0 +1,14 @@ +import { createMainDebugChannel } from "../lib/debug-channel"; + +const DEBUG_TERMINAL = process.env.SUPERSET_TERMINAL_DEBUG === "1"; + +// terminal host のログは、再起動前提の再現を避けるため +// Sentry には常時送る。 +// これにより renderer 側の停止、hidden terminal の滞留、 +// PTY/emulator の backpressure を事後に追いやすくする。 +// env フラグは同じ内容を console にも出すかだけを制御する。 +export const terminalHostDebug = createMainDebugChannel({ + namespace: "terminal.host", + enabled: true, + mirrorToConsole: DEBUG_TERMINAL, +}); diff --git a/apps/desktop/src/main/terminal-host/pty-subprocess.ts b/apps/desktop/src/main/terminal-host/pty-subprocess.ts index 5778288dc84..6ca5241d664 100644 --- a/apps/desktop/src/main/terminal-host/pty-subprocess.ts +++ b/apps/desktop/src/main/terminal-host/pty-subprocess.ts @@ -57,7 +57,7 @@ const INPUT_QUEUE_HARD_LIMIT_BYTES = 64 * 1024 * 1024; // 64MB let outputChunks: string[] = []; let outputBytesQueued = 0; let outputFlushScheduled = false; -const OUTPUT_FLUSH_INTERVAL_MS = 16; // Match terminal-style frame batching (~60fps) 
+const OUTPUT_FLUSH_INTERVAL_MS = 0; const MAX_OUTPUT_BATCH_SIZE_BYTES = 128 * 1024; // 128KB max per flush // Backpressure - track if stdout is draining diff --git a/apps/desktop/src/main/terminal-host/session.ts b/apps/desktop/src/main/terminal-host/session.ts index 67148e927f8..412a631a95f 100644 --- a/apps/desktop/src/main/terminal-host/session.ts +++ b/apps/desktop/src/main/terminal-host/session.ts @@ -36,6 +36,7 @@ import type { TerminalSnapshot, } from "../lib/terminal-host/types"; import { treeKillAsync } from "../lib/tree-kill"; +import { terminalHostDebug } from "./debug"; import { createFrameHeader, PtySubprocessFrameDecoder, @@ -81,6 +82,37 @@ const EMULATOR_WRITE_QUEUE_LOW_WATERMARK_BYTES = 250_000; */ const SHELL_READY_TIMEOUT_MS = 15_000; +/** + * Coalesce data broadcasts to reduce per-event IPC overhead + * (JSON.stringify + socket.write) and downstream renderer xterm.write cost + * when the PTY emits high-frequency full-screen redraws (e.g. Codex TUI in + * Ratatui which rewrites the whole screen 30-60 times per second). + * + * Flushed when either the interval elapses or the buffered byte count + * exceeds the threshold, whichever comes first. Non-data events + * (exit/error) and boundary events (attach) force an immediate flush so + * event ordering and snapshot consistency are preserved. + * + * Disable by setting SUPERSET_TERMINAL_BROADCAST_COALESCE=0. + */ +const BROADCAST_COALESCE_INTERVAL_MS = 0; +const BROADCAST_COALESCE_MAX_BYTES = 131_072; +const BROADCAST_COALESCE_ENABLED = + process.env.SUPERSET_TERMINAL_BROADCAST_COALESCE !== "0"; + +/** + * Coalesce consecutive queued emulator chunks into a single emulator.write() + * call per drain iteration. For high-frequency producers like Codex/Ratatui + * the emulator queue accumulates many small chunks in a single tick; + * concatenating them reduces ANSI parser setup overhead and function-call + * churn. 
The MAX_CHUNK_CHARS cap (below) is still honored so we never grow + * a single write beyond what the emulator already tolerates. + * + * Disable by setting SUPERSET_TERMINAL_EMULATOR_COALESCE=0. + */ +const EMULATOR_WRITE_COALESCE_ENABLED = + process.env.SUPERSET_TERMINAL_EMULATOR_COALESCE !== "0"; + /** * Shell readiness lifecycle: * - `pending` — shell is initializing; escape sequences dropped, other writes pass through @@ -173,6 +205,11 @@ export class Session { private emulatorWriteScheduled = false; private emulatorFlushWaiters: Array<() => void> = []; + // Broadcast data coalescing — see BROADCAST_COALESCE_* constants. + private pendingBroadcastChunks: string[] = []; + private pendingBroadcastBytes = 0; + private broadcastCoalesceTimer: ReturnType | null = null; + // Snapshot boundary tracking for concurrent attaches. private emulatorWriteProcessedItems = 0; private nextSnapshotBoundaryWaiterId = 1; @@ -374,11 +411,7 @@ export class Session { if (data.length === 0) break; this.enqueueEmulatorWrite(data); - - this.broadcastEvent("data", { - type: "data", - data, - } satisfies TerminalDataEvent); + this.queueBroadcastData(data); break; } @@ -387,6 +420,7 @@ export class Session { const signal = payload.length >= 8 ? 
payload.readInt32LE(4) : 0; this.exitCode = exitCode; + this.flushPendingBroadcastData(); this.broadcastEvent("exit", { type: "exit", exitCode, @@ -412,6 +446,7 @@ export class Session { errorMessage, ); + this.flushPendingBroadcastData(); this.broadcastEvent("error", { type: "error", error: errorMessage, @@ -431,6 +466,7 @@ export class Session { if (this.exitCode === null) { this.exitCode = exitCode; + this.flushPendingBroadcastData(); this.broadcastEvent("exit", { type: "exit", exitCode, @@ -496,6 +532,7 @@ export class Session { console.warn( `[Session ${this.sessionId}] stdin queue full (${this.subprocessStdinQueuedBytes} bytes), dropping frame`, ); + this.flushPendingBroadcastData(); this.broadcastEvent("error", { type: "error", error: "Write queue full - input dropped", @@ -620,11 +657,16 @@ export class Session { while (this.emulatorWriteQueue.length > 0) { if (performance.now() - start > budgetMs) break; - let chunk = this.emulatorWriteQueue[0]; - if (chunk.length > MAX_CHUNK_CHARS) { + const head = this.emulatorWriteQueue[0]; + + // Oversized head — split at MAX_CHUNK_CHARS (respecting surrogate + // pairs) and write a single slice. No coalescing possible here + // since the remainder must stay at the queue head for the next + // iteration to preserve FIFO order. 
+ if (head.length > MAX_CHUNK_CHARS) { let splitAt = MAX_CHUNK_CHARS; - const prev = chunk.charCodeAt(splitAt - 1); - const next = chunk.charCodeAt(splitAt); + const prev = head.charCodeAt(splitAt - 1); + const next = head.charCodeAt(splitAt); if ( prev >= 0xd800 && prev <= 0xdbff && @@ -633,16 +675,42 @@ export class Session { ) { splitAt--; } - this.emulatorWriteQueue[0] = chunk.slice(splitAt); - chunk = chunk.slice(0, splitAt); + this.emulatorWriteQueue[0] = head.slice(splitAt); + const chunk = head.slice(0, splitAt); + this.emulatorWriteQueuedBytes -= Buffer.byteLength(chunk, "utf8"); + this.emulator.write(chunk); + continue; + } + + // Coalesce consecutive in-bounds items into a single write while + // staying under MAX_CHUNK_CHARS. Each consumed item still counts + // as one processed item so snapshot-boundary targets match the + // pre-coalescing item accounting used by flushToSnapshotBoundary. + if (EMULATOR_WRITE_COALESCE_ENABLED) { + let merged = ""; + let itemsConsumed = 0; + while ( + this.emulatorWriteQueue.length > 0 && + merged.length + this.emulatorWriteQueue[0].length <= MAX_CHUNK_CHARS + ) { + const nextChunk = this.emulatorWriteQueue.shift() as string; + merged += nextChunk; + itemsConsumed++; + } + + if (itemsConsumed === 0) break; // defensive — should not happen + + this.emulatorWriteProcessedItems += itemsConsumed; + this.resolveReachedSnapshotBoundaryWaiters(); + this.emulatorWriteQueuedBytes -= Buffer.byteLength(merged, "utf8"); + this.emulator.write(merged); } else { this.emulatorWriteQueue.shift(); this.emulatorWriteProcessedItems++; this.resolveReachedSnapshotBoundaryWaiters(); + this.emulatorWriteQueuedBytes -= Buffer.byteLength(head, "utf8"); + this.emulator.write(head); } - - this.emulatorWriteQueuedBytes -= Buffer.byteLength(chunk, "utf8"); - this.emulator.write(chunk); } this.maybeResumeSubprocessStdoutForEmulatorBackpressure(); @@ -787,6 +855,12 @@ export class Session { } throwIfAborted(signal); + // Drain any pending coalesced 
data to existing clients before the new + // client joins. Otherwise the new client would receive pre-attach + // bytes that are already captured in the snapshot below, causing + // duplicated output (double-advance of cursor, etc.) on the renderer. + this.flushPendingBroadcastData(); + const attachedClient: AttachedClient = { socket, attachedAt: Date.now(), @@ -857,7 +931,7 @@ export class Session { * shell init commands (e.g. fnm's `use-on-cd` hook) opened an interactive * prompt before the OSC 133;A marker fired. See #3478. */ - write(data: string): void { + write(data: string, _options?: { interactive?: boolean }): void { if (!this.subprocess || !this.subprocessReady) { throw new Error("PTY not spawned"); } @@ -1011,6 +1085,12 @@ export class Session { const waiters = this.emulatorFlushWaiters; this.emulatorFlushWaiters = []; for (const resolve of waiters) resolve(); + + // Flush before dropping the coalesce buffer — resolveShellReady can + // enqueue held scanner bytes during teardown (short-lived shells that + // exit before the ready marker completes), and clearing without + // flushing would silently drop them. + this.flushPendingBroadcastData(); } /** @@ -1040,13 +1120,68 @@ export class Session { // Flush held marker bytes — they weren't part of a full marker if (this.scanState.heldBytes.length > 0) { this.enqueueEmulatorWrite(this.scanState.heldBytes); + this.queueBroadcastData(this.scanState.heldBytes); + this.scanState.heldBytes = ""; + } + this.scanState.matchPos = 0; + } + + /** + * Buffer a data event for coalesced broadcast. See BROADCAST_COALESCE_* + * constants for rationale. Falls back to immediate broadcast when + * coalescing is disabled via env var or the session is already disposed. 
+ */ + private queueBroadcastData(data: string): void { + if (data.length === 0) return; + + if (!BROADCAST_COALESCE_ENABLED || this.disposed) { this.broadcastEvent("data", { type: "data", - data: this.scanState.heldBytes, + data, } satisfies TerminalDataEvent); - this.scanState.heldBytes = ""; + return; + } + + this.pendingBroadcastChunks.push(data); + this.pendingBroadcastBytes += Buffer.byteLength(data, "utf8"); + + if (this.pendingBroadcastBytes >= BROADCAST_COALESCE_MAX_BYTES) { + this.flushPendingBroadcastData(); + return; + } + + if (!this.broadcastCoalesceTimer) { + this.broadcastCoalesceTimer = setTimeout(() => { + this.broadcastCoalesceTimer = null; + this.flushPendingBroadcastData(); + }, BROADCAST_COALESCE_INTERVAL_MS); } - this.scanState.matchPos = 0; + } + + /** + * Emit any buffered data chunks as a single merged broadcast. Safe to + * call when the buffer is empty. Must be called before any non-data + * event (exit/error) and before attaching a new client so ordering and + * snapshot consistency are preserved. + */ + private flushPendingBroadcastData(): void { + if (this.broadcastCoalesceTimer) { + clearTimeout(this.broadcastCoalesceTimer); + this.broadcastCoalesceTimer = null; + } + if (this.pendingBroadcastChunks.length === 0) return; + + const merged = + this.pendingBroadcastChunks.length === 1 + ? 
this.pendingBroadcastChunks[0] + : this.pendingBroadcastChunks.join(""); + this.pendingBroadcastChunks = []; + this.pendingBroadcastBytes = 0; + + this.broadcastEvent("data", { + type: "data", + data: merged, + } satisfies TerminalDataEvent); } /** @@ -1106,6 +1241,18 @@ export class Session { } this.emulatorWriteBackpressured = true; + terminalHostDebug.warn( + "emulator-backpressure-paused", + { + sessionId: this.sessionId, + queuedBytes: this.emulatorWriteQueuedBytes, + clientCount: this.attachedClients.size, + }, + { + captureMessage: true, + fingerprint: ["terminal.host", "emulator-backpressure-paused"], + }, + ); console.warn( `[Session ${this.sessionId}] Emulator backlog reached ${this.emulatorWriteQueuedBytes} bytes, pausing PTY reads`, ); @@ -1121,6 +1268,18 @@ export class Session { } this.emulatorWriteBackpressured = false; + terminalHostDebug.info( + "emulator-backpressure-resumed", + { + sessionId: this.sessionId, + queuedBytes: this.emulatorWriteQueuedBytes, + clientCount: this.attachedClients.size, + }, + { + captureMessage: true, + fingerprint: ["terminal.host", "emulator-backpressure-resumed"], + }, + ); this.updateSubprocessStdoutFlow(); } diff --git a/apps/desktop/src/main/terminal-host/terminal-host.ts b/apps/desktop/src/main/terminal-host/terminal-host.ts index b84bdcd0fe8..1ad240f25d3 100644 --- a/apps/desktop/src/main/terminal-host/terminal-host.ts +++ b/apps/desktop/src/main/terminal-host/terminal-host.ts @@ -217,7 +217,7 @@ export class TerminalHost { write(request: WriteRequest): EmptyResponse { const session = this.getActiveSession(request.sessionId); - session.write(request.data); + session.write(request.data, { interactive: request.interactive }); return { success: true }; } diff --git a/apps/desktop/src/main/todo-agent/attachments-cleanup.ts b/apps/desktop/src/main/todo-agent/attachments-cleanup.ts new file mode 100644 index 00000000000..5cba22ae939 --- /dev/null +++ b/apps/desktop/src/main/todo-agent/attachments-cleanup.ts @@ -0,0 
+1,107 @@ +import { readdirSync, statSync, unlinkSync } from "node:fs"; +import path from "node:path"; +import { todoSessions } from "@superset/local-db"; +import { app } from "electron"; +import { localDb } from "main/lib/local-db"; + +const ATTACHMENT_TTL_MS = 30 * 24 * 60 * 60 * 1000; // 30 days + +/** + * One-shot sweep of `userData/todo-agent/attachments/` at app startup. + * Removes any file that is BOTH: + * + * - older than 30 days (mtime), AND + * - not referenced by any `todo_sessions` row's description / + * goal / pendingIntervention / customSystemPrompt / finalAssistantText + * / verdictReason + * + * The age guard keeps the cache from growing forever while the + * reference check protects images attached to long-running or + * recently-resumed TODOs — those can still predate the 30-day + * window if the user revives an older session. + */ +export function cleanupOldAttachments(): void { + try { + const dir = path.join(app.getPath("userData"), "todo-agent", "attachments"); + let entries: string[]; + try { + entries = readdirSync(dir); + } catch { + // Directory doesn't exist yet — nothing to do. + return; + } + + // Build a set of filenames that are still referenced by any + // session's text columns. We only care about the file *basename* + // — that's unique per attachment (uuid-prefixed) and avoids + // false positives from substring matching elsewhere in the + // prompt text. 
+ const referenced = new Set(); + try { + const rows = localDb + .select({ + description: todoSessions.description, + goal: todoSessions.goal, + pendingIntervention: todoSessions.pendingIntervention, + customSystemPrompt: todoSessions.customSystemPrompt, + finalAssistantText: todoSessions.finalAssistantText, + verdictReason: todoSessions.verdictReason, + }) + .from(todoSessions) + .all(); + // Match only our own attachment filenames — the save path + // always prefixes a UUID, so the pattern is narrow enough + // to avoid false positives from unrelated paths that happen + // to contain "attachments" in prompt text. Works across + // POSIX / Windows by allowing either separator. + const filenameRe = + /[/\\]attachments[/\\]([0-9a-f-]{36}-[^\s"'<>()\]]+)/gi; + for (const row of rows) { + for (const text of [ + row.description, + row.goal, + row.pendingIntervention, + row.customSystemPrompt, + row.finalAssistantText, + row.verdictReason, + ]) { + if (!text) continue; + for (const m of text.matchAll(filenameRe)) { + if (m[1]) referenced.add(m[1]); + } + } + } + } catch (error) { + // If the reference scan fails for any reason, bail out of + // cleanup entirely — better to keep orphans than to delete + // something that turns out to be referenced. + console.warn( + "[todo-agent] attachment reference scan failed, skipping cleanup", + error, + ); + return; + } + + const cutoff = Date.now() - ATTACHMENT_TTL_MS; + let removed = 0; + for (const name of entries) { + if (referenced.has(name)) continue; + const full = path.join(dir, name); + try { + const st = statSync(full); + if (!st.isFile()) continue; + if (st.mtimeMs < cutoff) { + unlinkSync(full); + removed += 1; + } + } catch { + // Ignore individual file errors; continue the sweep. 
+ } + } + if (removed > 0) { + console.log(`[todo-agent] purged ${removed} stale attachment(s)`); + } + } catch (error) { + console.warn("[todo-agent] attachment cleanup failed", error); + } +} diff --git a/apps/desktop/src/main/todo-agent/daemon-bridge.ts b/apps/desktop/src/main/todo-agent/daemon-bridge.ts new file mode 100644 index 00000000000..735831a32b3 --- /dev/null +++ b/apps/desktop/src/main/todo-agent/daemon-bridge.ts @@ -0,0 +1,130 @@ +import { + disposeTodoDaemonClient, + getTodoDaemonClient, +} from "main/lib/todo-daemon/client"; +import { + getTodoSessionDebugData, + getTodoStreamBatchDebugData, + getTodoStreamEventDebugData, + todoAgentMainDebug, +} from "./debug"; +import { getTodoSessionStore } from "./session-store"; + +/** + * Wire the daemon client to the main-process session-store so tRPC + * subscribers see updates that the daemon writes from its own DB + * connection. Also issues a one-shot `rehydrate` so any session that + * was running when the previous daemon died gets marked `failed`. + * + * Safe to call multiple times; second and later calls are no-ops. 
+ */ +let wired = false; +let connectPromise: Promise | null = null; + +export function startTodoAgentDaemonBridge(): Promise { + if (connectPromise) return connectPromise; + const client = getTodoDaemonClient(); + if (!wired) { + wired = true; + client.on("sessionState", (payload) => { + todoAgentMainDebug.info( + "todo-daemon-bridge-session-state", + getTodoSessionDebugData(payload.session), + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-daemon-bridge-session-state"], + }, + ); + getTodoSessionStore().externalEmit(payload.session); + }); + client.on("streamEvents", (payload) => { + todoAgentMainDebug.info( + "todo-daemon-bridge-stream-batch", + getTodoStreamBatchDebugData(payload.sessionId, payload.events), + ); + for (const event of payload.events) { + if ( + event.kind !== "system_init" && + event.kind !== "error" && + event.kind !== "remote_control" && + event.kind !== "remote_control_error" + ) { + continue; + } + todoAgentMainDebug.info( + "todo-daemon-bridge-stream-event", + { + sessionId: payload.sessionId, + ...getTodoStreamEventDebugData(event), + }, + { + captureMessage: true, + fingerprint: [ + "todo.agent.main", + "todo-daemon-bridge-stream-event", + event.kind, + ], + }, + ); + } + getTodoSessionStore().externalEmitStream( + payload.sessionId, + payload.events, + ); + }); + client.on("disconnected", () => { + console.warn( + "[todo-agent] daemon disconnected — will reconnect on next RPC", + ); + todoAgentMainDebug.warn("todo-daemon-bridge-disconnected", undefined, { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-daemon-bridge-disconnected"], + }); + }); + client.on("error", (error) => { + console.warn("[todo-agent] daemon client error", error); + todoAgentMainDebug.captureException( + error, + "todo-daemon-bridge-error", + undefined, + { + fingerprint: ["todo.agent.main", "todo-daemon-bridge-error"], + }, + ); + }); + } + connectPromise = (async () => { + todoAgentMainDebug.info("todo-daemon-bridge-init", 
undefined, { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-daemon-bridge-init"], + }); + try { + await client.ensureConnected(); + await client.rehydrate(); + todoAgentMainDebug.info("todo-daemon-bridge-init-success", undefined, { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-daemon-bridge-init-success"], + }); + } catch (error) { + console.warn("[todo-agent] daemon bridge failed to initialize", error); + todoAgentMainDebug.captureException( + error, + "todo-daemon-bridge-init-failed", + undefined, + { + fingerprint: ["todo.agent.main", "todo-daemon-bridge-init-failed"], + }, + ); + // Drop the cached promise so a later retry can try again. + connectPromise = null; + throw error; + } + })(); + return connectPromise; +} + +export function stopTodoAgentDaemonBridge(): void { + disposeTodoDaemonClient(); + connectPromise = null; + wired = false; +} diff --git a/apps/desktop/src/main/todo-agent/debug.ts b/apps/desktop/src/main/todo-agent/debug.ts new file mode 100644 index 00000000000..abd542c8c9c --- /dev/null +++ b/apps/desktop/src/main/todo-agent/debug.ts @@ -0,0 +1,77 @@ +import type { SelectTodoSession } from "@superset/local-db"; +import { createMainDebugChannel } from "../lib/debug-channel"; +import type { TodoStreamEvent } from "./types"; + +const DEBUG_TODO_AGENT = process.env.SUPERSET_TODO_DEBUG === "1"; + +// TODO Agent の作成から daemon 実行、PTY / Remote Control 分岐までを +// 一回の Sentry ログで追えるようにするための main/daemon 共通 logger。 +// 主に見たいのは: +// - renderer から送った PTY / Remote の意図が main で落ちていないか +// - runtime-config.json に正しく保存 / 読み出しできたか +// - daemon が headless / PTY / Remote Control をどう最終判定したか +// - PTY 起動後に Remote Control URL 発行まで進んだか +// Sentry には常時送り、console ミラーだけ env フラグで制御する。 +export const todoAgentMainDebug = createMainDebugChannel({ + namespace: "todo.agent.main", + enabled: true, + mirrorToConsole: DEBUG_TODO_AGENT, +}); + +export function getTodoSessionDebugData( + session: Pick< + SelectTodoSession, + | "id" + | "workspaceId" 
+ | "projectId" + | "status" + | "phase" + | "iteration" + | "artifactPath" + | "remoteControlEnabled" + | "claudeSessionId" + | "verdictReason" + | "waitingReason" + >, +) { + return { + sessionId: session.id, + workspaceId: session.workspaceId, + projectId: session.projectId ?? null, + status: session.status, + phase: session.phase, + iteration: session.iteration, + artifactPath: session.artifactPath, + remoteControlEnabled: session.remoteControlEnabled ?? false, + hasClaudeSessionId: Boolean(session.claudeSessionId), + verdictReason: session.verdictReason ?? null, + waitingReason: session.waitingReason ?? null, + }; +} + +export function getTodoStreamEventDebugData( + event: Pick, +) { + return { + eventId: event.id, + iteration: event.iteration, + kind: event.kind, + label: event.label, + textPreview: event.text, + }; +} + +export function getTodoStreamBatchDebugData( + sessionId: string, + events: readonly Pick[], +) { + const kinds = Array.from(new Set(events.map((event) => event.kind))); + const lastEvent = events.length > 0 ? events[events.length - 1] : null; + return { + sessionId, + eventCount: events.length, + eventKinds: kinds.join(","), + firstKind: events[0]?.kind ?? null, + lastKind: lastEvent?.kind ?? null, + }; +} diff --git a/apps/desktop/src/main/todo-agent/enhance-text.ts b/apps/desktop/src/main/todo-agent/enhance-text.ts new file mode 100644 index 00000000000..cc24ad23d9c --- /dev/null +++ b/apps/desktop/src/main/todo-agent/enhance-text.ts @@ -0,0 +1,102 @@ +import { generateText, type LanguageModel } from "ai"; +import { + callSmallModel, + type SmallModelAttempt, +} from "lib/ai/call-small-model"; + +/** + * AI-rewrite helper for the TODO creation form. Takes a piece of user- + * written text (rough description or rough goal) and rewrites it into a + * clearer, LLM-friendly instruction. 
Uses the existing `callSmallModel` + * plumbing so credentials, provider fallback, and diagnostics all come + * for free — same path as the workspace auto-namer. + * + * The system prompts are deliberately kept short and concrete. They do + * NOT add length; they rewrite in place. + */ + +export type TodoTextKind = "description" | "goal"; + +const INSTRUCTIONS: Record = { + description: [ + "あなたはユーザーが書いた雑な TODO の記述を、自律コーディングエージェントが理解しやすい明確な指示に書き換えるアシスタントです。", + "", + "次の観点で書き換えてください:", + "- 何をすべきかを具体的に", + "- 前提・対象ファイル・制約が推測できる範囲で明示", + "- 曖昧な表現(ちゃんと/きれいに/いい感じに 等)を避ける", + "- 元の意図は絶対に保つ。新しい要件を勝手に追加しない", + "- 過剰な装飾・前置き・解説を付けない", + "- 日本語で書く", + "- 1〜6 行程度に収める", + "- 出力は書き換え後のテキストのみ。引用符や見出しを付けない", + ].join("\n"), + goal: [ + "あなたはユーザーが書いた雑な TODO のゴールを、自律コーディングエージェントが完了判定に使える明確な受け入れ条件に書き換えるアシスタントです。", + "", + "次の観点で書き換えてください:", + "- 「〜ができている」「〜が動作している」「〜が存在する」など検証可能な形にする", + "- 複数ある場合は箇条書き(行頭 '- ')で列挙", + "- 曖昧な表現を避ける", + "- 元の意図を保つ", + "- 日本語で書く", + "- 合計で 1〜6 行程度に収める", + "- 出力は書き換え後のテキストのみ。引用符や見出しを付けない", + ].join("\n"), +}; + +export interface EnhanceTodoTextResult { + text: string | null; + attempts: SmallModelAttempt[]; +} + +export async function enhanceTodoText( + rawText: string, + kind: TodoTextKind, +): Promise { + const cleaned = rawText.trim(); + if (!cleaned) { + return { text: null, attempts: [] }; + } + + const system = INSTRUCTIONS[kind]; + + const { result, attempts } = await callSmallModel({ + invoke: async ({ model }) => { + const { text } = await generateText({ + model: model as LanguageModel, + system, + prompt: cleaned, + }); + const trimmed = text.trim(); + return trimmed.length > 0 ? trimmed : null; + }, + }); + + return { text: result ?? null, attempts }; +} + +/** + * Turn a failed `callSmallModel` attempt list into a user-facing error + * message in Japanese. Returns a generic fallback if no attempt carries + * a useful reason. 
+ */ +export function describeEnhanceFailure(attempts: SmallModelAttempt[]): string { + for (let index = attempts.length - 1; index >= 0; index -= 1) { + const attempt = attempts[index]; + if (!attempt) continue; + if (attempt.outcome === "expired-credentials") { + return `${attempt.issue?.message ?? `${attempt.providerName} の認証が切れています`}。設定から再接続してください。`; + } + if (attempt.outcome === "failed") { + return `${attempt.providerName} での書き換えに失敗しました: ${attempt.issue?.message ?? attempt.reason ?? "unknown"}`; + } + if (attempt.outcome === "unsupported-credentials") { + return `${attempt.providerName} の認証種別が書き換えに対応していません。`; + } + } + if (attempts.every((a) => a.outcome === "missing-credentials")) { + return "AI 書き換えに使えるモデルアカウントが接続されていません。設定から Anthropic か OpenAI を接続してください。"; + } + return "AI 書き換えに失敗しました。"; +} diff --git a/apps/desktop/src/main/todo-agent/git-status.ts b/apps/desktop/src/main/todo-agent/git-status.ts new file mode 100644 index 00000000000..6b24b0b505f --- /dev/null +++ b/apps/desktop/src/main/todo-agent/git-status.ts @@ -0,0 +1,301 @@ +import { execGitWithShellPath } from "lib/trpc/routers/workspaces/utils/git-client"; + +/** + * Git inspection helpers scoped to a TODO session. + * + * All operations are read-only and routed through `execGitWithShellPath` + * so shell PATH is resolved correctly (same helper the rest of the app's + * git plumbing uses). The session's `startHeadSha` column — captured by + * the supervisor the moment `runSession` begins — anchors "what this + * session produced" vs. "what was already there", so commits the user + * made before the session are never attributed to it. + */ + +async function gitOut(args: string[], cwd: string): Promise { + try { + const { stdout } = await execGitWithShellPath(args, { cwd }); + return stdout; + } catch { + return ""; + } +} + +/** + * Does `sha` resolve to a commit object in `cwd`'s git dir? 
Used to + * distinguish "no new commits" from "startHeadSha was orphaned by a + * reset/rebase" — otherwise both look identical in the sidebar. + */ +async function gitRevExists(sha: string, cwd: string): Promise { + try { + await execGitWithShellPath( + ["rev-parse", "--verify", "--quiet", `${sha}^{commit}`], + { cwd }, + ); + return true; + } catch { + return false; + } +} + +export async function getCurrentHeadSha(cwd: string): Promise { + const out = (await gitOut(["rev-parse", "HEAD"], cwd)).trim(); + return out || null; +} + +export interface SessionGitCommit { + sha: string; + shortSha: string; + subject: string; + authorName: string; + authorDate: string; +} + +export type SessionGitFileStage = "staged" | "unstaged" | "untracked"; + +export interface SessionGitFile { + path: string; + stage: SessionGitFileStage; + /** Raw git status letter — M / A / D / R / C / U / ? */ + code: string; +} + +export interface SessionGitChangedFile { + path: string; + /** First letter of git's name-status code: A / M / D / R / C / T */ + code: string; +} + +export interface SessionGitSnapshot { + branch: string | null; + startHeadSha: string | null; + currentHeadSha: string | null; + commits: SessionGitCommit[]; + workingTree: SessionGitFile[]; + /** + * Files whose contents differ between `startHeadSha` and HEAD + * (two-dot `git diff`). Populated regardless of whether HEAD is a + * descendant of startHeadSha, so branch switches / rebases still + * surface the cumulative session delta instead of silently + * rendering an empty sidebar. + */ + sessionFiles: SessionGitChangedFile[]; + /** + * True when `startHeadSha` is set but its commit object is no + * longer reachable (e.g. the branch was reset and the object was + * pruned, or a different repo was swapped in under the worktree). + * The UI uses this to show an explanatory message rather than a + * silently empty panel. 
+ */ + startHeadUnreachable: boolean; + ahead: number; + behind: number; +} + +const COMMIT_DELIM = "\x00"; +const COMMIT_FORMAT = ["%H", "%h", "%s", "%an", "%aI"].join(COMMIT_DELIM); + +export async function getSessionGitSnapshot(params: { + cwd: string; + startHeadSha: string | null; +}): Promise { + const { cwd, startHeadSha } = params; + + const [branchOut, currentOut] = await Promise.all([ + gitOut(["rev-parse", "--abbrev-ref", "HEAD"], cwd), + gitOut(["rev-parse", "HEAD"], cwd), + ]); + const branch = branchOut.trim() || null; + const currentHeadSha = currentOut.trim() || null; + + // Commits produced since the session started. Scoped to the range + // `startHeadSha..HEAD`; when HEAD is not a descendant of + // startHeadSha (branch switch / reset / rebase), this can validly + // return an empty list, and we surface cumulative file-level + // changes via `sessionFiles` below so the sidebar isn't empty. + let commits: SessionGitCommit[] = []; + let sessionFiles: SessionGitChangedFile[] = []; + let startHeadUnreachable = false; + if (startHeadSha && currentHeadSha && startHeadSha !== currentHeadSha) { + const reachable = await gitRevExists(startHeadSha, cwd); + if (!reachable) { + startHeadUnreachable = true; + } else { + const logOut = await gitOut( + [ + "log", + `${startHeadSha}..${currentHeadSha}`, + `--format=${COMMIT_FORMAT}`, + ], + cwd, + ); + commits = logOut + .split("\n") + .filter((l) => l.length > 0) + .map((line) => { + const [sha, shortSha, subject, authorName, authorDate] = + line.split(COMMIT_DELIM); + return { + sha: sha ?? "", + shortSha: shortSha ?? "", + subject: subject ?? "", + authorName: authorName ?? "", + authorDate: authorDate ?? "", + }; + }); + + // `git diff --name-status -z A B` compares the two commits + // directly (two-dot in diff has no range semantics), so it + // works even when A and B are on divergent histories. 
This + // is what lets the sidebar show the real session delta + // when commits are zero but files were touched. + const diffOut = await gitOut( + ["diff", "--name-status", "-z", startHeadSha, currentHeadSha], + cwd, + ); + sessionFiles = parseNameStatusNul(diffOut); + } + } + + // Working tree state via porcelain v1 for stable parsing. + const statusOut = await gitOut( + ["status", "--porcelain=v1", "--untracked-files=all"], + cwd, + ); + const workingTree: SessionGitFile[] = []; + const seen = new Set(); + for (const line of statusOut.split("\n")) { + if (line.length < 3) continue; + const indexStatus = line[0] ?? " "; + const wtStatus = line[1] ?? " "; + const filePath = line.slice(3); + const key = `${filePath}|${indexStatus}${wtStatus}`; + if (seen.has(key)) continue; + seen.add(key); + if (indexStatus === "?" && wtStatus === "?") { + workingTree.push({ path: filePath, stage: "untracked", code: "?" }); + continue; + } + if (indexStatus !== " " && indexStatus !== "?") { + workingTree.push({ + path: filePath, + stage: "staged", + code: indexStatus, + }); + } + if (wtStatus !== " " && wtStatus !== "?") { + workingTree.push({ + path: filePath, + stage: "unstaged", + code: wtStatus, + }); + } + } + + // Ahead/behind relative to upstream, if configured. Failure is + // expected when no upstream is set, so swallow silently. + let ahead = 0; + let behind = 0; + const rlOut = ( + await gitOut(["rev-list", "--left-right", "--count", "HEAD...@{u}"], cwd) + ).trim(); + if (rlOut) { + const parts = rlOut.split(/\s+/); + if (parts.length === 2) { + ahead = Number(parts[0]) || 0; + behind = Number(parts[1]) || 0; + } + } + + return { + branch, + startHeadSha, + currentHeadSha, + commits, + workingTree, + sessionFiles, + startHeadUnreachable, + ahead, + behind, + }; +} + +/** + * Parse `git diff --name-status -z` output. 
+ * + * Standard entries are `\0\0`; rename/copy entries are + * `\0\0\0` — we keep only the new path + * and collapse the code to its first letter so the UI can render a + * single badge per file. + */ +function parseNameStatusNul(raw: string): SessionGitChangedFile[] { + const files: SessionGitChangedFile[] = []; + const parts = raw.split("\0"); + let i = 0; + while (i < parts.length) { + const token = parts[i]; + if (!token) { + i += 1; + continue; + } + const letter = token[0] ?? ""; + if (letter === "R" || letter === "C") { + const newPath = parts[i + 2]; + if (newPath) files.push({ path: newPath, code: letter }); + i += 3; + continue; + } + const p = parts[i + 1]; + if (p) files.push({ path: p, code: letter || token }); + i += 2; + } + return files; +} + +export type SessionDiffScope = "session" | "staged" | "unstaged" | "commit"; + +export async function getSessionFileDiff(params: { + cwd: string; + startHeadSha: string | null; + path: string; + scope: SessionDiffScope; + commitSha?: string; +}): Promise { + const { cwd, startHeadSha, path, scope, commitSha } = params; + const args: string[] = ["--no-pager", "diff", "--no-color"]; + + switch (scope) { + case "session": + if (!startHeadSha) return ""; + args.push(`${startHeadSha}..HEAD`, "--", path); + break; + case "staged": + args.push("--cached", "--", path); + break; + case "unstaged": + args.push("--", path); + break; + case "commit": { + if (!commitSha) return ""; + // Whole-commit diff: `git show --format= ` returns just + // the patch, no commit header. When the caller supplies a + // path we scope to that file via `-- `; when the path + // is empty (UI selects a commit row, not a specific file), + // we must NOT append an empty pathspec or Git rejects it + // with "empty string is not a valid pathspec" and the diff + // silently disappears from the sidebar. 
+ const showArgs = [ + "--no-pager", + "show", + "--no-color", + "--format=", + commitSha, + ]; + if (path && path.length > 0) { + showArgs.push("--", path); + } + return gitOut(showArgs, cwd); + } + } + + return gitOut(args, cwd); +} diff --git a/apps/desktop/src/main/todo-agent/index.ts b/apps/desktop/src/main/todo-agent/index.ts new file mode 100644 index 00000000000..2c85c8935fb --- /dev/null +++ b/apps/desktop/src/main/todo-agent/index.ts @@ -0,0 +1,9 @@ +export { cleanupOldAttachments } from "./attachments-cleanup"; +export { getTodoScheduleStore } from "./schedule-store"; +export { getTodoScheduler } from "./scheduler"; +export { getTodoSessionStore } from "./session-store"; +export { cleanupOldSessions } from "./sessions-cleanup"; +export { getTodoSupervisor } from "./supervisor"; +export type { TodoAgentRouter } from "./trpc-router"; +export { createTodoAgentRouter } from "./trpc-router"; +export * from "./types"; diff --git a/apps/desktop/src/main/todo-agent/runtime-config.ts b/apps/desktop/src/main/todo-agent/runtime-config.ts new file mode 100644 index 00000000000..26d83414625 --- /dev/null +++ b/apps/desktop/src/main/todo-agent/runtime-config.ts @@ -0,0 +1,184 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import path from "node:path"; +import { todoAgentMainDebug } from "./debug"; + +const TODO_RUNTIME_CONFIG_FILE = "runtime-config.json"; + +export interface TodoSessionRuntimeConfig { + ptyEnabled: boolean; + remoteControlEnabled: boolean; +} + +function getRuntimeConfigPath(artifactPath: string): string { + return path.join(artifactPath, TODO_RUNTIME_CONFIG_FILE); +} + +function normalizeConfig( + config: TodoSessionRuntimeConfig, +): TodoSessionRuntimeConfig { + const ptyEnabled = config.ptyEnabled === true; + return { + ptyEnabled, + remoteControlEnabled: ptyEnabled && config.remoteControlEnabled === true, + }; +} + +export function readTodoSessionRuntimeConfig(params: { + artifactPath: string; + 
fallbackRemoteControlEnabled?: boolean | null; +}): TodoSessionRuntimeConfig { + const legacyRemoteControlEnabled = + params.fallbackRemoteControlEnabled === true; + const legacyFallback = { + ptyEnabled: legacyRemoteControlEnabled, + remoteControlEnabled: legacyRemoteControlEnabled, + }; + if (!path.isAbsolute(params.artifactPath)) { + todoAgentMainDebug.warn( + "todo-runtime-config-read-fallback", + { + artifactPath: params.artifactPath, + reason: "artifact-path-not-absolute", + fallbackPtyEnabled: legacyFallback.ptyEnabled, + fallbackRemoteControlEnabled: legacyFallback.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-read-fallback"], + }, + ); + return legacyFallback; + } + + const filePath = getRuntimeConfigPath(params.artifactPath); + if (!existsSync(filePath)) { + todoAgentMainDebug.warn( + "todo-runtime-config-read-fallback", + { + artifactPath: params.artifactPath, + filePath, + reason: "runtime-config-missing", + fallbackPtyEnabled: legacyFallback.ptyEnabled, + fallbackRemoteControlEnabled: legacyFallback.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-read-fallback"], + }, + ); + return legacyFallback; + } + + try { + const parsed = JSON.parse( + readFileSync(filePath, "utf8"), + ) as Partial | null; + if (!parsed || typeof parsed !== "object") { + todoAgentMainDebug.warn( + "todo-runtime-config-read-fallback", + { + artifactPath: params.artifactPath, + filePath, + reason: "runtime-config-invalid-json-shape", + fallbackPtyEnabled: legacyFallback.ptyEnabled, + fallbackRemoteControlEnabled: legacyFallback.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-read-fallback"], + }, + ); + return legacyFallback; + } + const normalized = normalizeConfig({ + ptyEnabled: parsed.ptyEnabled === true, + remoteControlEnabled: parsed.remoteControlEnabled === true, + }); + 
todoAgentMainDebug.info( + "todo-runtime-config-read", + { + artifactPath: params.artifactPath, + filePath, + ptyEnabled: normalized.ptyEnabled, + remoteControlEnabled: normalized.remoteControlEnabled, + usedFallback: false, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-read"], + }, + ); + return normalized; + } catch (error) { + console.warn("[todo-agent] failed to read runtime config", error); + todoAgentMainDebug.captureException( + error, + "todo-runtime-config-read-failed", + { + artifactPath: params.artifactPath, + filePath, + fallbackPtyEnabled: legacyFallback.ptyEnabled, + fallbackRemoteControlEnabled: legacyFallback.remoteControlEnabled, + }, + { + fingerprint: ["todo.agent.main", "todo-runtime-config-read-failed"], + }, + ); + return legacyFallback; + } +} + +export function writeTodoSessionRuntimeConfig( + artifactPath: string, + config: TodoSessionRuntimeConfig, +): void { + if (!path.isAbsolute(artifactPath)) { + todoAgentMainDebug.warn( + "todo-runtime-config-write-skipped", + { + artifactPath, + reason: "artifact-path-not-absolute", + ptyEnabled: config.ptyEnabled, + remoteControlEnabled: config.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-write-skipped"], + }, + ); + return; + } + try { + const normalized = normalizeConfig(config); + const filePath = getRuntimeConfigPath(artifactPath); + mkdirSync(artifactPath, { recursive: true }); + writeFileSync(filePath, `${JSON.stringify(normalized, null, 2)}\n`, "utf8"); + todoAgentMainDebug.info( + "todo-runtime-config-write", + { + artifactPath, + filePath, + ptyEnabled: normalized.ptyEnabled, + remoteControlEnabled: normalized.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-runtime-config-write"], + }, + ); + } catch (error) { + console.warn("[todo-agent] failed to write runtime config", error); + todoAgentMainDebug.captureException( + error, + 
"todo-runtime-config-write-failed", + { + artifactPath, + ptyEnabled: config.ptyEnabled, + remoteControlEnabled: config.remoteControlEnabled, + }, + { + fingerprint: ["todo.agent.main", "todo-runtime-config-write-failed"], + }, + ); + } +} diff --git a/apps/desktop/src/main/todo-agent/schedule-store.ts b/apps/desktop/src/main/todo-agent/schedule-store.ts new file mode 100644 index 00000000000..1f4692503bb --- /dev/null +++ b/apps/desktop/src/main/todo-agent/schedule-store.ts @@ -0,0 +1,225 @@ +import { EventEmitter } from "node:events"; +import { + type InsertTodoSchedule, + type SelectTodoSchedule, + todoSchedules, +} from "@superset/local-db"; +import { and, desc, eq, isNotNull, lte } from "drizzle-orm"; +import { localDb } from "main/lib/local-db"; +import type { + TodoScheduleCreateInput, + TodoScheduleFireEvent, + TodoScheduleUpdateInput, +} from "./types"; + +/** + * Persistence layer for the TODO agent schedules table plus an event bus the + * scheduler uses to broadcast fire events into the tRPC subscription. + * + * Kept deliberately thin: the scheduler is responsible for cadence math, + * this module just does CRUD + emit. + */ +class TodoScheduleStore { + private readonly emitter = new EventEmitter(); + /** + * Cached init failure (kind="failed", scheduleId="__scheduler_init__"). + * The renderer subscribes after it mounts, which is well after the + * main-process bootstrap emits the failure. Replaying it on first + * subscription ensures the user still sees the toast. 
+ */ + private pendingInitFailure: TodoScheduleFireEvent | null = null; + + emitFire(event: TodoScheduleFireEvent): void { + if (event.kind === "failed" && event.scheduleId === "__scheduler_init__") { + this.pendingInitFailure = event; + } + this.emitter.emit("fire", event); + } + + onFire(handler: (event: TodoScheduleFireEvent) => void): () => void { + this.emitter.on("fire", handler); + if (this.pendingInitFailure) { + const replayed = this.pendingInitFailure; + this.pendingInitFailure = null; + // Replay asynchronously so the subscriber is fully wired up + // before its handler runs, matching ordinary emit timing. + queueMicrotask(() => handler(replayed)); + } + return () => { + this.emitter.off("fire", handler); + }; + } + + insert( + input: TodoScheduleCreateInput & { nextRunAt: number | null }, + ): SelectTodoSchedule { + const row: InsertTodoSchedule = { + projectId: input.projectId, + workspaceId: input.workspaceId ?? null, + name: input.name, + enabled: input.enabled, + frequency: input.frequency, + minute: input.minute ?? null, + hour: input.hour ?? null, + weekday: input.weekday ?? null, + monthday: input.monthday ?? null, + cronExpr: input.cronExpr ?? null, + title: input.title, + description: input.description, + goal: input.goal ?? null, + verifyCommand: input.verifyCommand ?? null, + maxIterations: input.maxIterations, + maxWallClockSec: input.maxWallClockSec, + customSystemPrompt: input.customSystemPrompt ?? null, + claudeModel: input.claudeModel ?? null, + claudeEffort: input.claudeEffort ?? 
null, + overlapMode: input.overlapMode, + autoSyncBeforeFire: input.autoSyncBeforeFire, + nextRunAt: input.nextRunAt, + }; + + return localDb.insert(todoSchedules).values(row).returning().get(); + } + + update(input: TodoScheduleUpdateInput): SelectTodoSchedule | undefined { + const { id, ...rest } = input; + const patch: Partial & { updatedAt: number } = { + updatedAt: Date.now(), + }; + if (rest.name !== undefined) patch.name = rest.name; + if (rest.enabled !== undefined) patch.enabled = rest.enabled; + if (rest.frequency !== undefined) patch.frequency = rest.frequency; + if (rest.minute !== undefined) patch.minute = rest.minute ?? null; + if (rest.hour !== undefined) patch.hour = rest.hour ?? null; + if (rest.weekday !== undefined) patch.weekday = rest.weekday ?? null; + if (rest.monthday !== undefined) patch.monthday = rest.monthday ?? null; + if (rest.cronExpr !== undefined) patch.cronExpr = rest.cronExpr ?? null; + if (rest.title !== undefined) patch.title = rest.title; + if (rest.description !== undefined) patch.description = rest.description; + if (rest.goal !== undefined) patch.goal = rest.goal ?? null; + if (rest.verifyCommand !== undefined) + patch.verifyCommand = rest.verifyCommand ?? null; + if (rest.maxIterations !== undefined) + patch.maxIterations = rest.maxIterations; + if (rest.maxWallClockSec !== undefined) + patch.maxWallClockSec = rest.maxWallClockSec; + if (rest.customSystemPrompt !== undefined) + patch.customSystemPrompt = rest.customSystemPrompt ?? null; + if (rest.claudeModel !== undefined) + patch.claudeModel = rest.claudeModel ?? null; + if (rest.claudeEffort !== undefined) + patch.claudeEffort = rest.claudeEffort ?? null; + if (rest.overlapMode !== undefined) patch.overlapMode = rest.overlapMode; + if (rest.autoSyncBeforeFire !== undefined) + patch.autoSyncBeforeFire = rest.autoSyncBeforeFire; + if (rest.workspaceId !== undefined) + patch.workspaceId = rest.workspaceId ?? 
null; + // projectId is intentionally not patched here — it is immutable + // once the schedule is created. + + return localDb + .update(todoSchedules) + .set(patch) + .where(eq(todoSchedules.id, id)) + .returning() + .get(); + } + + setNextRunAt(id: string, nextRunAt: number | null): void { + localDb + .update(todoSchedules) + .set({ nextRunAt, updatedAt: Date.now() }) + .where(eq(todoSchedules.id, id)) + .run(); + } + + recordRun({ + id, + sessionId, + firedAt, + nextRunAt, + }: { + id: string; + sessionId: string | null; + firedAt: number; + nextRunAt: number | null; + }): void { + localDb + .update(todoSchedules) + .set({ + lastRunAt: firedAt, + lastRunSessionId: sessionId, + nextRunAt, + updatedAt: Date.now(), + }) + .where(eq(todoSchedules.id, id)) + .run(); + } + + setEnabled(id: string, enabled: boolean): SelectTodoSchedule | undefined { + return localDb + .update(todoSchedules) + .set({ enabled, updatedAt: Date.now() }) + .where(eq(todoSchedules.id, id)) + .returning() + .get(); + } + + get(id: string): SelectTodoSchedule | undefined { + return localDb + .select() + .from(todoSchedules) + .where(eq(todoSchedules.id, id)) + .get(); + } + + delete(id: string): boolean { + const result = localDb + .delete(todoSchedules) + .where(eq(todoSchedules.id, id)) + .run(); + return result.changes > 0; + } + + listForProject(projectId: string): SelectTodoSchedule[] { + return localDb + .select() + .from(todoSchedules) + .where(eq(todoSchedules.projectId, projectId)) + .orderBy(desc(todoSchedules.createdAt)) + .all(); + } + + listAll(): SelectTodoSchedule[] { + return localDb + .select() + .from(todoSchedules) + .orderBy(desc(todoSchedules.createdAt)) + .all(); + } + + listDue(now: number): SelectTodoSchedule[] { + return localDb + .select() + .from(todoSchedules) + .where( + and( + eq(todoSchedules.enabled, true), + isNotNull(todoSchedules.nextRunAt), + lte(todoSchedules.nextRunAt, now), + ), + ) + .all(); + } +} + +let instance: TodoScheduleStore | null = null; + 
+export function getTodoScheduleStore(): TodoScheduleStore { + if (!instance) { + instance = new TodoScheduleStore(); + } + return instance; +} + +export type { TodoScheduleStore }; diff --git a/apps/desktop/src/main/todo-agent/schedule-sync.ts b/apps/desktop/src/main/todo-agent/schedule-sync.ts new file mode 100644 index 00000000000..4e3ba5bc97e --- /dev/null +++ b/apps/desktop/src/main/todo-agent/schedule-sync.ts @@ -0,0 +1,97 @@ +import { execGitWithShellPath } from "lib/trpc/routers/workspaces/utils/git-client"; + +export type ScheduleSyncResult = + | { kind: "ok"; checkedOut: string } + | { kind: "dirty"; message: string } + | { kind: "error"; message: string }; + +async function runGit( + args: string[], + cwd: string, + timeout = 60_000, +): Promise<{ stdout: string; stderr: string }> { + const result = await execGitWithShellPath(args, { cwd, timeout }); + return { stdout: result.stdout, stderr: result.stderr }; +} + +async function hasUncommittedChanges(cwd: string): Promise { + try { + const { stdout } = await runGit(["status", "--porcelain"], cwd, 15_000); + return stdout.trim().length > 0; + } catch { + // If status itself fails we can't be sure — treat as dirty to + // avoid destructive actions. + return true; + } +} + +async function resolveDefaultBranch(cwd: string): Promise { + try { + const { stdout } = await runGit( + ["symbolic-ref", "refs/remotes/origin/HEAD", "--short"], + cwd, + 10_000, + ); + const ref = stdout.trim(); + const defaultBranch = ref.replace(/^origin\//, ""); + if (defaultBranch) { + return defaultBranch; + } + } catch {} + // Fallbacks — conservative default. + return "main"; +} + +/** + * Opt-in "freshen the main repo before firing a schedule". 
Keeps the + * scope deliberately narrow: + * + * - `git fetch origin` + * - abort if the working tree has uncommitted changes (never stash — + * we refuse to touch the user's work) + * - `git checkout ` + * - `git pull --ff-only origin ` + * + * Called only when `todoSchedule.autoSyncBeforeFire` is true and the + * schedule is firing on the project main repo (no specific worktree). + */ +export async function autoSyncProjectMain( + cwd: string, +): Promise { + try { + await runGit(["fetch", "origin"], cwd, 120_000); + } catch (error) { + const message = + error instanceof Error ? error.message : "git fetch が失敗しました"; + return { kind: "error", message }; + } + + if (await hasUncommittedChanges(cwd)) { + return { + kind: "dirty", + message: "未コミット変更があるため main を更新できませんでした", + }; + } + + const defaultBranch = await resolveDefaultBranch(cwd); + + try { + await runGit(["checkout", defaultBranch], cwd, 60_000); + } catch (error) { + const message = + error instanceof Error + ? error.message + : `git checkout ${defaultBranch} が失敗しました`; + return { kind: "error", message }; + } + + try { + await runGit(["pull", "--ff-only", "origin", defaultBranch], cwd, 120_000); + } catch (error) { + const message = + error instanceof Error ? 
error.message : "git pull が失敗しました"; + return { kind: "error", message }; + } + + return { kind: "ok", checkedOut: defaultBranch }; +} diff --git a/apps/desktop/src/main/todo-agent/scheduler.ts b/apps/desktop/src/main/todo-agent/scheduler.ts new file mode 100644 index 00000000000..a2292cdb690 --- /dev/null +++ b/apps/desktop/src/main/todo-agent/scheduler.ts @@ -0,0 +1,443 @@ +import type { SelectTodoSchedule, SelectTodoSession } from "@superset/local-db"; +import { CronExpressionParser } from "cron-parser"; +import { getTodoScheduleStore } from "./schedule-store"; +import { autoSyncProjectMain } from "./schedule-sync"; +import { + ensureProjectBranchWorkspaceId, + getTodoSessionStore, + resolveWorktreePath, +} from "./session-store"; +import { getTodoSupervisor } from "./supervisor"; +import type { TodoScheduleFireEvent } from "./types"; + +const TICK_INTERVAL_MS = 30_000; + +/** + * Compute the next fire time (epoch ms) for a schedule, starting from + * `from`. For `custom` we delegate to cron-parser; for the builder-backed + * frequencies we compute it directly to avoid forcing the user through + * cron syntax. + */ +export function computeNextRunAt( + schedule: Pick< + SelectTodoSchedule, + "frequency" | "minute" | "hour" | "weekday" | "monthday" | "cronExpr" + >, + from: Date, +): number | null { + if (schedule.frequency === "custom") { + if (!schedule.cronExpr) return null; + try { + const interval = CronExpressionParser.parse(schedule.cronExpr, { + currentDate: from, + }); + return interval.next().getTime(); + } catch { + return null; + } + } + + const minute = schedule.minute ?? 0; + const hour = schedule.hour ?? 0; + const next = new Date(from); + // Snap seconds/ms to zero so fires land exactly on the minute boundary. 
+ next.setSeconds(0, 0); + + switch (schedule.frequency) { + case "hourly": { + next.setMinutes(minute); + if (next.getTime() <= from.getTime()) { + next.setHours(next.getHours() + 1); + } + return next.getTime(); + } + case "daily": { + next.setHours(hour, minute, 0, 0); + if (next.getTime() <= from.getTime()) { + next.setDate(next.getDate() + 1); + } + return next.getTime(); + } + case "weekly": { + const targetWeekday = schedule.weekday ?? 0; + next.setHours(hour, minute, 0, 0); + const currentWeekday = next.getDay(); + let delta = targetWeekday - currentWeekday; + if (delta < 0) delta += 7; + if (delta === 0 && next.getTime() <= from.getTime()) { + delta = 7; + } + next.setDate(next.getDate() + delta); + return next.getTime(); + } + case "monthly": { + const targetMonthday = schedule.monthday ?? 1; + // Snap the target to the last valid day of each month so + // e.g. "every 31st" doesn't overflow Feb to Mar 3 — users + // who pick 31 expect "last day of every month" on short + // months. + const placeOnMonth = (base: Date) => { + const lastDay = new Date( + base.getFullYear(), + base.getMonth() + 1, + 0, + ).getDate(); + base.setDate(Math.min(targetMonthday, lastDay)); + base.setHours(hour, minute, 0, 0); + }; + next.setDate(1); + placeOnMonth(next); + if (next.getTime() <= from.getTime()) { + next.setDate(1); + next.setMonth(next.getMonth() + 1); + placeOnMonth(next); + } + return next.getTime(); + } + default: + return null; + } +} + +function isSessionActive(session: SelectTodoSession | undefined): boolean { + if (!session) return false; + return ( + session.status === "queued" || + session.status === "preparing" || + session.status === "running" || + session.status === "verifying" || + session.status === "paused" || + // `waiting` means the worker called `ScheduleWakeup` to pause + // itself and will be resumed by the scheduler tick. Count it as + // active so the overlap guard and the concurrency display do not + // treat a self-parked session as finished. 
+ session.status === "waiting" + ); +} + +class TodoScheduler { + private timer: ReturnType | null = null; + private inFlight = false; + private isStopped = false; + + start(): void { + if (this.timer) return; + this.isStopped = false; + // Re-seed nextRunAt for any schedule that lost its value (e.g. migration + // from schedules inserted before this field got populated). Safe to + // re-run on every boot because `lastRunAt` is respected. + this.rebuildNextRunTimes(); + this.timer = setInterval(() => { + void this.tick(); + }, TICK_INTERVAL_MS); + // Run an immediate tick so schedules already past-due when the app + // starts up fire on the first 30s window instead of waiting for it. + void this.tick(); + } + + stop(): void { + this.isStopped = true; + if (this.timer) { + clearInterval(this.timer); + this.timer = null; + } + } + + private rebuildNextRunTimes(): void { + const store = getTodoScheduleStore(); + const now = new Date(); + for (const schedule of store.listAll()) { + if (!schedule.enabled) continue; + if (schedule.nextRunAt !== null) continue; + const next = computeNextRunAt(schedule, now); + if (next !== null) { + store.setNextRunAt(schedule.id, next); + } + } + } + + /** + * Public hook used by the tRPC layer when a schedule is created or its + * cadence definition changes. Recomputes nextRunAt relative to `now`. + */ + refreshNextRunAt(scheduleId: string): void { + const store = getTodoScheduleStore(); + const schedule = store.get(scheduleId); + if (!schedule) return; + if (!schedule.enabled) { + store.setNextRunAt(scheduleId, null); + return; + } + const next = computeNextRunAt(schedule, new Date()); + store.setNextRunAt(scheduleId, next); + } + + private async tick(): Promise { + if (this.inFlight || this.isStopped) return; + this.inFlight = true; + try { + const store = getTodoScheduleStore(); + // Wake self-paced (`ScheduleWakeup`) sessions whose deadline + // has passed before we process new schedule fires. 
Doing this + // first means a schedule firing into an already-waiting + // session sees the updated status and respects overlap mode. + this.resumeDueWaitingSessions(); + // Snapshot "due" using tick start time, but compute each + // schedule's firedAt from the actual moment fire() runs. + // Otherwise a slow fire leaves the next schedule in the loop + // advancing `nextRunAt` from a stale tick-start timestamp — + // for minute-level cron that can emit a "next run" already + // in the past and trigger duplicate fires on the next tick. + const due = store.listDue(Date.now()); + for (const schedule of due) { + // Abort mid-iteration if a shutdown came in while we were + // awaiting a previous fire. Prevents inserting a session + // row after closeLocalDb() has torn down SQLite. + if (this.isStopped) break; + await this.fire(schedule, Date.now()); + } + } catch (error) { + console.warn("[todo-scheduler] tick failed:", error); + } finally { + this.inFlight = false; + } + } + + /** + * Scan for `waiting` sessions whose `waitingUntil` has elapsed and + * hand them back to the supervisor. The status flip is gated on the + * row still being `waiting` at claim time so a race with the user + * clicking Abort (which writes `aborted`) between `listWaitingDue` + * and the update cannot resurrect an abort into a fresh run. + * + * `supervisor.start` is currently a synchronous queue+drain wrapper + * that does not throw, so the trailing `.catch` here is purely a + * defensive log path: if a future change to `start` introduces + * validation throws, the rejection still surfaces in the console + * instead of becoming an unhandled rejection. Run-time failures + * inside `runSession` are owned by the supervisor's own drain + * pipeline and are not the scheduler's responsibility. 
+ */ + private resumeDueWaitingSessions(): void { + const sessionStore = getTodoSessionStore(); + const due = sessionStore.listWaitingDue(Date.now()); + if (due.length === 0) return; + const supervisor = getTodoSupervisor(); + for (const session of due) { + if (this.isStopped) return; + const claimed = sessionStore.claimWaitingForResume(session.id); + if (!claimed) continue; + // Tag this start as a scheduler-driven wakeup so the + // supervisor can skip the "再開" banner and use a short + // continuation prompt instead of re-replaying the goal + // (issue #240). + void supervisor + .start(session.id, { fromScheduledWakeup: true }) + .catch((err) => { + console.warn( + `[todo-scheduler] supervisor.start unexpectedly rejected for ${session.id}:`, + err, + ); + }); + } + } + + private async fire( + schedule: SelectTodoSchedule, + firedAt: number, + ): Promise { + const store = getTodoScheduleStore(); + const sessionStore = getTodoSessionStore(); + const supervisor = getTodoSupervisor(); + + const nextRunAt = computeNextRunAt(schedule, new Date(firedAt)); + + // Overlap guard: if a previous session from this schedule is still + // active and the user asked us to skip, short-circuit without + // creating a new session. Still advance nextRunAt so we don't busy + // loop on the same tick. + // + // `overlapMode === "queue"` is intentionally handled by the existing + // TodoSupervisor queue: we always insert the new session and call + // supervisor.start(), which enqueues when another session is already + // running instead of spawning in parallel. No extra branch needed here. 
+ if (schedule.overlapMode === "skip" && schedule.lastRunSessionId) { + const prev = sessionStore.get(schedule.lastRunSessionId); + if (isSessionActive(prev)) { + store.recordRun({ + id: schedule.id, + sessionId: schedule.lastRunSessionId, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "skipped", + sessionId: null, + message: "前回の実行が終わっていないためスキップしました", + firedAt, + }); + return; + } + } + + // Resolve the workspace to attach the fired session to. If the + // schedule was saved project-only (workspaceId = null), fall back + // to the project's branch-type workspace, materializing one if the + // project doesn't already have it. That keeps `todo_sessions` + // workspaceId NOT NULL intact while letting the UI expose the + // "run on project main repo" mental model. + const fireWorkspaceId = + schedule.workspaceId ?? + ensureProjectBranchWorkspaceId(schedule.projectId); + if (!fireWorkspaceId) { + store.recordRun({ + id: schedule.id, + sessionId: null, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "failed", + sessionId: null, + message: "プロジェクトのワークスペースを用意できませんでした", + firedAt, + }); + return; + } + + const worktreePath = resolveWorktreePath(fireWorkspaceId); + if (!worktreePath) { + store.recordRun({ + id: schedule.id, + sessionId: null, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "failed", + sessionId: null, + message: "ワークスペースのパスが解決できませんでした", + firedAt, + }); + return; + } + + // Opt-in: freshen the project main repo before firing. Applies + // only when the schedule itself has no workspaceId (we refuse to + // yank HEAD on a worktree workspace — that would rewrite someone + // else's working branch). If the tree is dirty we deliberately + // skip the fire rather than stash the user's work. 
+ if (schedule.autoSyncBeforeFire && schedule.workspaceId === null) { + const syncResult = await autoSyncProjectMain(worktreePath); + if (syncResult.kind !== "ok") { + store.recordRun({ + id: schedule.id, + sessionId: null, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: syncResult.kind === "dirty" ? "skipped" : "failed", + sessionId: null, + message: syncResult.message, + firedAt, + }); + return; + } + } + + try { + const sessionId = crypto.randomUUID(); + const artifactPath = supervisor.computeArtifactPath({ + sessionId, + workspaceId: fireWorkspaceId, + }); + const inserted = sessionStore.insertQueuedFromTemplate({ + id: sessionId, + projectId: schedule.projectId, + workspaceId: fireWorkspaceId, + title: schedule.title, + description: schedule.description, + goal: schedule.goal, + verifyCommand: schedule.verifyCommand, + maxIterations: schedule.maxIterations, + maxWallClockSec: schedule.maxWallClockSec, + customSystemPrompt: schedule.customSystemPrompt, + claudeModel: schedule.claudeModel, + claudeEffort: schedule.claudeEffort, + artifactPath, + }); + supervisor.prepareArtifacts(inserted); + void supervisor.start(inserted.id).catch((err) => { + const failureMessage = + err instanceof Error ? err.message : "Unknown error"; + console.warn( + `[todo-scheduler] supervisor.start failed for ${inserted.id}:`, + err, + ); + // The triggered toast has already fired, so publish a + // follow-up failed event. Otherwise the UI would claim the + // fire succeeded even though the supervisor never ran. 
+ this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "failed", + sessionId: inserted.id, + message: `実行開始に失敗しました: ${failureMessage}`, + firedAt, + }); + }); + store.recordRun({ + id: schedule.id, + sessionId: inserted.id, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "triggered", + sessionId: inserted.id, + message: null, + firedAt, + }); + } catch (error) { + const message = error instanceof Error ? error.message : "Unknown error"; + store.recordRun({ + id: schedule.id, + sessionId: null, + firedAt, + nextRunAt, + }); + this.emit({ + scheduleId: schedule.id, + scheduleName: schedule.name, + kind: "failed", + sessionId: null, + message, + firedAt, + }); + } + } + + private emit(event: TodoScheduleFireEvent): void { + getTodoScheduleStore().emitFire(event); + } +} + +let instance: TodoScheduler | null = null; + +export function getTodoScheduler(): TodoScheduler { + if (!instance) { + instance = new TodoScheduler(); + } + return instance; +} diff --git a/apps/desktop/src/main/todo-agent/session-store.ts b/apps/desktop/src/main/todo-agent/session-store.ts new file mode 100644 index 00000000000..3476449b30a --- /dev/null +++ b/apps/desktop/src/main/todo-agent/session-store.ts @@ -0,0 +1,615 @@ +import { EventEmitter } from "node:events"; +import { existsSync, mkdirSync, readFileSync } from "node:fs"; +import { appendFile } from "node:fs/promises"; +import path from "node:path"; +import { + projects, + type SelectTodoSession, + todoSessions, + workspaces, + worktrees, +} from "@superset/local-db"; +import { + and, + desc, + eq, + inArray, + isNull, + lte, + not, + notInArray, +} from "drizzle-orm"; +import { localDb } from "main/lib/local-db"; +import type { + TodoSessionListEntry, + TodoSessionStateEvent, + TodoStreamEvent, + TodoStreamUpdate, +} from "./types"; + +export type { TodoSessionListEntry }; + +const STREAM_JSONL_FILE = "stream.jsonl"; + +/** + * Cap on the number of 
stream events we keep in memory per session. Enough + * to show "the whole current run" in the UI without letting an unbounded + * stream balloon process memory. Older events are dropped from the head. + */ +const STREAM_EVENT_BUFFER_CAP = 500; + +/** + * In-memory session bookkeeping + persistence helpers for the TODO agent. + * + * All state transitions go through `updateSession` so we have exactly one + * place that writes to the DB and emits the state event consumed by the + * tRPC subscription. + */ +class TodoSessionStore { + private readonly emitter = new EventEmitter(); + /** In-memory per-session stream event buffer. Not persisted. */ + private readonly streamBuffers = new Map(); + /** + * Cached absolute artifact path per sessionId. The supervisor + * primes this at the start of each run via `setArtifactPathCache` + * so append-hot stream writes do not need to hit SQLite on every + * event. + */ + private readonly artifactPathCache = new Map(); + /** + * Per-session serialized append chain. `appendFile` from + * node:fs/promises is async, and bursts of stream events can race + * and write out-of-order. We sequence them per session via a + * promise chain — cheap and avoids reordering the JSONL. + */ + private readonly persistQueues = new Map>(); + + constructor() { + this.emitter.setMaxListeners(0); + // Rehydration is now delegated to the todo-agent daemon so sessions + // that survive the main process close aren't mistakenly marked + // failed. The daemon calls `rehydrateStrandedSessionsExcept` with + // the set of sessions it's actively driving. + } + + setArtifactPathCache(sessionId: string, artifactPath: string | null): void { + if (artifactPath?.startsWith("/")) { + this.artifactPathCache.set(sessionId, artifactPath); + // Make sure the directory exists once, up-front, so the async + // appendFile calls below never race on mkdir. 
+ try { + mkdirSync(artifactPath, { recursive: true }); + } catch (error) { + console.warn("[todo-agent] artifact mkdir failed", error); + } + } else { + this.artifactPathCache.delete(sessionId); + } + } + + appendStreamEvents(sessionId: string, events: TodoStreamEvent[]): void { + if (events.length === 0) return; + const buffer = this.streamBuffers.get(sessionId) ?? []; + buffer.push(...events); + // Drop from the head if we are over the cap so the tail (most + // recent activity) is always preserved. + if (buffer.length > STREAM_EVENT_BUFFER_CAP) { + buffer.splice(0, buffer.length - STREAM_EVENT_BUFFER_CAP); + } + this.streamBuffers.set(sessionId, buffer); + + // Persist every event to disk so that sessions stay reviewable + // across app restarts and after the in-memory cap evicts them. + // The file lives inside the per-session artifact dir we already + // created via `prepareArtifacts`, so cleanup is automatic when + // the session (and its artifact dir) are deleted. + this.persistStreamEvents(sessionId, events); + + const update: TodoStreamUpdate = { sessionId, events }; + this.emitter.emit(`stream:${sessionId}`, update); + } + + getStreamEvents(sessionId: string): TodoStreamEvent[] { + const inMemory = this.streamBuffers.get(sessionId); + if (inMemory && inMemory.length > 0) return [...inMemory]; + // Fall back to the JSONL file — this is how we hydrate a past + // session whose in-memory buffer was cleared (either by app + // restart or by the eviction cap). + return this.loadStreamEventsFromDisk(sessionId); + } + + clearStreamEvents(sessionId: string): void { + this.streamBuffers.delete(sessionId); + } + + private persistStreamEvents( + sessionId: string, + events: TodoStreamEvent[], + ): void { + // Fast-path: use the cached absolute path the supervisor primed + // when the run started. Falls back to a DB read only when no + // cache entry exists (e.g. a historical session being replayed + // outside of a run). 
+ let dir = this.artifactPathCache.get(sessionId); + if (!dir) { + const session = this.get(sessionId); + dir = session?.artifactPath; + if (dir?.startsWith("/")) { + this.artifactPathCache.set(sessionId, dir); + } + } + if (!dir || !dir.startsWith("/")) return; + const filePath = path.join(dir, STREAM_JSONL_FILE); + const body = `${events.map((e) => JSON.stringify(e)).join("\n")}\n`; + + // Chain async appends so bursty event streams stay ordered in + // the JSONL file and main process is not blocked on fs I/O. + const previous = this.persistQueues.get(sessionId) ?? Promise.resolve(); + const nextTask = previous + .catch(() => {}) + .then(() => appendFile(filePath, body, "utf8")) + .catch((error) => { + console.warn("[todo-agent] stream persist failed", error); + }); + this.persistQueues.set(sessionId, nextTask); + } + + /** + * On daemon startup, any session that was mid-run when the previous + * daemon died will still have a non-terminal status + * (`preparing` / `running` / `verifying`) in the DB even though the + * daemon has no record of it. Flip those to `failed` so the user + * can re-run or delete from the UI. + * + * `activeSessionIds` is the set of sessions the daemon is **currently** + * driving (ActiveRun map keys). Those are skipped so a running + * daemon that reconnects doesn't stomp on its own live work. + * + * Safe to call multiple times; behaves as a no-op when nothing is + * stranded. Returns the number of rows rehydrated. + */ + rehydrateStrandedSessionsExcept(activeSessionIds: readonly string[]): number { + try { + const baseCondition = inArray(todoSessions.status, [ + "preparing", + "running", + "verifying", + ]); + const whereClause = + activeSessionIds.length > 0 + ? 
and( + baseCondition, + notInArray(todoSessions.id, activeSessionIds as string[]), + ) + : baseCondition; + const stranded = localDb + .update(todoSessions) + .set({ + status: "failed", + phase: "failed", + verdictPassed: false, + verdictReason: + "前回の実行が中断されました(daemon が停止)。再実行するか削除してください。", + completedAt: Date.now(), + updatedAt: Date.now(), + }) + .where(whereClause) + .returning() + .all(); + for (const row of stranded) { + this.emit(row); + } + if (stranded.length > 0) { + console.log( + `[todo-agent] rehydrated ${stranded.length} stranded session(s)`, + ); + } + return stranded.length; + } catch (error) { + console.warn("[todo-agent] rehydrate on startup failed", error); + return 0; + } + } + + private loadStreamEventsFromDisk(sessionId: string): TodoStreamEvent[] { + try { + const session = this.get(sessionId); + const dir = session?.artifactPath; + if (!dir || !dir.startsWith("/")) return []; + const filePath = path.join(dir, STREAM_JSONL_FILE); + if (!existsSync(filePath)) return []; + const text = readFileSync(filePath, "utf8"); + const lines = text.split("\n").filter((l) => l.length > 0); + const events: TodoStreamEvent[] = []; + for (const line of lines) { + try { + const parsed = JSON.parse(line) as TodoStreamEvent; + if ( + parsed && + typeof parsed === "object" && + typeof parsed.id === "string" && + typeof parsed.kind === "string" + ) { + events.push(parsed); + } + } catch { + // Skip malformed line. 
+ } + } + return events; + } catch (error) { + console.warn("[todo-agent] stream load failed", error); + return []; + } + } + + subscribeStream( + sessionId: string, + handler: (update: TodoStreamUpdate) => void, + ): () => void { + const key = `stream:${sessionId}`; + this.emitter.on(key, handler); + return () => { + this.emitter.off(key, handler); + }; + } + + insert( + row: Omit & { + id?: string; + }, + ): SelectTodoSession { + const inserted = localDb.insert(todoSessions).values(row).returning().get(); + this.emit(inserted); + return inserted; + } + + /** + * Insert a fresh `queued` session from a user-authored template (TODO + * composer, schedule fire, or anywhere else that starts a new session + * from scratch). Centralizing this here keeps the full TodoSession row + * shape in one place — otherwise any new field on `todo_sessions` has + * to be remembered in every call site. + */ + insertQueuedFromTemplate(template: { + id: string; + projectId: string | null | undefined; + workspaceId: string; + title: string; + description: string; + goal?: string | null; + verifyCommand?: string | null; + maxIterations: number; + maxWallClockSec: number; + customSystemPrompt?: string | null; + claudeModel?: string | null; + claudeEffort?: string | null; + agentKind?: string | null; + codexModel?: string | null; + codexEffort?: string | null; + crushModel?: string | null; + remoteControlEnabled?: boolean; + artifactPath: string; + }): SelectTodoSession { + return this.insert({ + id: template.id, + projectId: template.projectId ?? null, + workspaceId: template.workspaceId, + title: template.title, + description: template.description, + goal: template.goal ?? null, + verifyCommand: template.verifyCommand ?? 
null, + maxIterations: template.maxIterations, + maxWallClockSec: template.maxWallClockSec, + status: "queued", + phase: "queued", + iteration: 0, + attachedPaneId: null, + attachedTabId: null, + claudeSessionId: null, + finalAssistantText: null, + totalCostUsd: null, + totalNumTurns: null, + pendingIntervention: null, + startHeadSha: null, + customSystemPrompt: template.customSystemPrompt ?? null, + claudeModel: template.claudeModel ?? null, + claudeEffort: template.claudeEffort ?? null, + agentKind: template.agentKind ?? "claude", + codexModel: template.codexModel ?? null, + codexEffort: template.codexEffort ?? null, + crushModel: template.crushModel ?? null, + remoteControlEnabled: template.remoteControlEnabled ?? false, + verdictPassed: null, + verdictReason: null, + verdictFailingTest: null, + artifactPath: template.artifactPath, + waitingUntil: null, + waitingReason: null, + startedAt: null, + completedAt: null, + }); + } + + get(sessionId: string): SelectTodoSession | undefined { + return localDb + .select() + .from(todoSessions) + .where(eq(todoSessions.id, sessionId)) + .get(); + } + + listForWorkspace(workspaceId: string): SelectTodoSession[] { + return localDb + .select() + .from(todoSessions) + .where(eq(todoSessions.workspaceId, workspaceId)) + .orderBy(desc(todoSessions.createdAt)) + .all(); + } + + /** + * Sessions parked in `waiting` whose `waitingUntil` deadline has + * passed. Drives the scheduler tick that resumes `ScheduleWakeup`- + * paused sessions once their delay elapses. + */ + listWaitingDue(nowMs: number): SelectTodoSession[] { + return localDb + .select() + .from(todoSessions) + .where( + and( + eq(todoSessions.status, "waiting"), + lte(todoSessions.waitingUntil, nowMs), + ), + ) + .all(); + } + + /** + * Atomically flip a row from `waiting` → `queued` and clear the + * parking fields. 
Returns the updated row (so callers can tell they + * won the claim) or undefined when the session has since moved to a + * different status — typically because the user clicked Abort while + * the scheduler tick was already in flight. Used as the race guard + * before the scheduler hands a session back to the supervisor. + */ + claimWaitingForResume(sessionId: string): SelectTodoSession | undefined { + const updated = localDb + .update(todoSessions) + .set({ + status: "queued", + phase: "queued", + waitingUntil: null, + waitingReason: null, + updatedAt: Date.now(), + }) + .where( + and(eq(todoSessions.id, sessionId), eq(todoSessions.status, "waiting")), + ) + .returning() + .get(); + if (updated) this.emit(updated); + return updated; + } + + /** + * Cross-workspace list used by the Agent-Manager-style view. Joins in + * workspace + project names so the manager can group and label rows + * without issuing N extra queries. Deleted workspaces + * (`deletingAt IS NOT NULL`) are filtered out. + */ + listAll(): TodoSessionListEntry[] { + const rows = localDb + .select({ + session: todoSessions, + workspaceName: workspaces.name, + workspaceBranch: workspaces.branch, + workspaceDeletingAt: workspaces.deletingAt, + projectName: projects.name, + }) + .from(todoSessions) + .leftJoin(workspaces, eq(workspaces.id, todoSessions.workspaceId)) + .leftJoin(projects, eq(projects.id, workspaces.projectId)) + .where(isNull(workspaces.deletingAt)) + .orderBy(desc(todoSessions.createdAt)) + .all(); + return rows.map((row) => ({ + ...row.session, + workspaceName: row.workspaceName ?? null, + workspaceBranch: row.workspaceBranch ?? null, + projectName: row.projectName ?? 
null, + })); + } + + update( + sessionId: string, + patch: Partial, + ): SelectTodoSession | undefined { + const next = { + ...patch, + updatedAt: Date.now(), + }; + const updated = localDb + .update(todoSessions) + .set(next) + .where(eq(todoSessions.id, sessionId)) + .returning() + .get(); + if (updated) this.emit(updated); + return updated; + } + + remove(sessionId: string): boolean { + const result = localDb + .delete(todoSessions) + .where(eq(todoSessions.id, sessionId)) + .run(); + this.clearStreamEvents(sessionId); + return result.changes > 0; + } + + subscribe( + sessionId: string, + handler: (event: TodoSessionStateEvent) => void, + ): () => void { + const key = `session:${sessionId}`; + this.emitter.on(key, handler); + return () => { + this.emitter.off(key, handler); + }; + } + + private emit(session: SelectTodoSession): void { + const event: TodoSessionStateEvent = { + sessionId: session.id, + session, + }; + this.emitter.emit(`session:${session.id}`, event); + } + + /** + * Bridge hook used by the daemon client in the main process. + * The daemon writes to SQLite in its own process, so this store + * (living in the main process) does not observe those writes + * directly. The client re-emits them via this method so tRPC + * subscribers receive the update just like a local write. + */ + externalEmit(session: SelectTodoSession): void { + this.emit(session); + } + + /** + * Same idea as {@link externalEmit} but for stream-event appends: + * updates the in-memory buffer so `getStreamEvents` stays warm, + * then fans the update out to any subscribers. + */ + externalEmitStream(sessionId: string, events: TodoStreamEvent[]): void { + if (events.length === 0) return; + const buffer = this.streamBuffers.get(sessionId) ?? 
[]; + buffer.push(...events); + if (buffer.length > STREAM_EVENT_BUFFER_CAP) { + buffer.splice(0, buffer.length - STREAM_EVENT_BUFFER_CAP); + } + this.streamBuffers.set(sessionId, buffer); + const update: TodoStreamUpdate = { sessionId, events }; + this.emitter.emit(`stream:${sessionId}`, update); + } +} + +let singleton: TodoSessionStore | undefined; + +export function getTodoSessionStore(): TodoSessionStore { + if (!singleton) singleton = new TodoSessionStore(); + return singleton; +} + +/** + * Resolve the absolute filesystem path a TODO session should run in for a + * given workspace. For `type="worktree"` workspaces this is the worktree + * path; for `type="branch"` workspaces there is no worktree row and we + * fall back to the project's `mainRepoPath`, matching the resolution + * strategy used by the existing terminal runtime in + * `workspace-terminal-context.ts`. Returns undefined only when the + * workspace does not exist. + */ +export function resolveWorktreePath(workspaceId: string): string | undefined { + const row = localDb + .select({ + worktreePath: worktrees.path, + mainRepoPath: projects.mainRepoPath, + }) + .from(workspaces) + .leftJoin(projects, eq(projects.id, workspaces.projectId)) + .leftJoin(worktrees, eq(worktrees.id, workspaces.worktreeId)) + .where(eq(workspaces.id, workspaceId)) + .get(); + return row?.worktreePath ?? row?.mainRepoPath ?? undefined; +} + +/** + * Ensure a project has its `type="branch"` workspace (the row that maps + * to `mainRepoPath`). Creates one lazily if missing so schedules with + * no explicit workspaceId can attach their sessions to something real. + * Returns the workspace id, or undefined if the project itself is gone. 
+ */ +export function ensureProjectBranchWorkspaceId( + projectId: string, +): string | undefined { + const existing = localDb + .select({ id: workspaces.id }) + .from(workspaces) + .where( + and( + eq(workspaces.projectId, projectId), + eq(workspaces.type, "branch"), + isNull(workspaces.deletingAt), + ), + ) + .get(); + if (existing) return existing.id; + + const project = localDb + .select({ + defaultBranch: projects.defaultBranch, + }) + .from(projects) + .where(eq(projects.id, projectId)) + .get(); + if (!project) return undefined; + + const branchName = project.defaultBranch ?? "main"; + const inserted = localDb + .insert(workspaces) + .values({ + projectId, + type: "branch", + branch: branchName, + name: branchName, + tabOrder: 0, + }) + .onConflictDoNothing() + .returning({ id: workspaces.id }) + .get(); + + if (inserted) { + // Mirror the standard workspace-create flow: bump every other + // workspace in the project by +1 so the new branch workspace + // lands uniquely at tabOrder 0 instead of colliding with an + // existing 0-ordered worktree (which would yield a + // non-deterministic sort in the sidebar). + const siblings = localDb + .select({ id: workspaces.id, tabOrder: workspaces.tabOrder }) + .from(workspaces) + .where( + and( + eq(workspaces.projectId, projectId), + not(eq(workspaces.id, inserted.id)), + isNull(workspaces.deletingAt), + ), + ) + .all(); + for (const sibling of siblings) { + localDb + .update(workspaces) + .set({ tabOrder: (sibling.tabOrder ?? 0) + 1 }) + .where(eq(workspaces.id, sibling.id)) + .run(); + } + return inserted.id; + } + + // Race: another path materialized it between our check and insert. 
+ const raced = localDb + .select({ id: workspaces.id }) + .from(workspaces) + .where( + and( + eq(workspaces.projectId, projectId), + eq(workspaces.type, "branch"), + isNull(workspaces.deletingAt), + ), + ) + .get(); + return raced?.id; +} diff --git a/apps/desktop/src/main/todo-agent/sessions-cleanup.ts b/apps/desktop/src/main/todo-agent/sessions-cleanup.ts new file mode 100644 index 00000000000..552024b18ac --- /dev/null +++ b/apps/desktop/src/main/todo-agent/sessions-cleanup.ts @@ -0,0 +1,83 @@ +import { existsSync, rmSync } from "node:fs"; +import { todoSessions } from "@superset/local-db"; +import { and, inArray, sql } from "drizzle-orm"; +import { localDb } from "main/lib/local-db"; +import { getTodoSettings } from "./settings"; + +const TERMINAL_STATUSES = ["done", "failed", "aborted", "escalated"] as const; + +/** + * One-shot sweep of old terminal TODO sessions at app startup. + * + * Respects `todo-agent-settings.sessionRetentionDays`: + * - 0 (default) → no automatic deletion (legacy behavior) + * - N > 0 → delete sessions whose `completedAt` (or createdAt + * fallback for rows that finished without a timestamp) + * is older than N days AND whose status is terminal. + * + * Running / queued / paused / verifying / preparing sessions are NEVER + * touched — they're active user work. The session's artifact directory + * (`artifactPath`) is removed alongside the row. + */ +export function cleanupOldSessions(): void { + try { + const { sessionRetentionDays } = getTodoSettings(); + if (sessionRetentionDays <= 0) return; + + const cutoffMs = Date.now() - sessionRetentionDays * 24 * 60 * 60 * 1000; + + // The retention cutoff must be based on when the session finished, + // not when it started. A session created weeks ago but completed + // today is still "fresh" from the user's perspective. Fall back + // to createdAt only for the rare terminal row with no + // completedAt timestamp. 
+ const candidates = localDb + .select({ + id: todoSessions.id, + artifactPath: todoSessions.artifactPath, + }) + .from(todoSessions) + .where( + and( + inArray(todoSessions.status, [...TERMINAL_STATUSES]), + sql`COALESCE(${todoSessions.completedAt}, ${todoSessions.createdAt}) < ${cutoffMs}`, + ), + ) + .all(); + + if (candidates.length === 0) return; + + // Delete rows in a single DB call so we don't thrash the journal + // if the retention window has hundreds of pending deletes. + localDb + .delete(todoSessions) + .where( + inArray( + todoSessions.id, + candidates.map((row) => row.id), + ), + ) + .run(); + + for (const row of candidates) { + if (!row.artifactPath) continue; + try { + if (existsSync(row.artifactPath)) { + rmSync(row.artifactPath, { recursive: true, force: true }); + } + } catch (error) { + console.warn( + "[todo-agent] failed to remove session artifact:", + row.artifactPath, + error, + ); + } + } + + console.log( + `[todo-agent] cleaned up ${candidates.length} session(s) older than ${sessionRetentionDays} days`, + ); + } catch (error) { + console.warn("[todo-agent] session cleanup failed:", error); + } +} diff --git a/apps/desktop/src/main/todo-agent/settings.ts b/apps/desktop/src/main/todo-agent/settings.ts new file mode 100644 index 00000000000..5f5bd6a551b --- /dev/null +++ b/apps/desktop/src/main/todo-agent/settings.ts @@ -0,0 +1,111 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs"; +import { homedir } from "node:os"; +import path from "node:path"; +import { SUPERSET_DIR_NAME } from "shared/constants"; +import { type TodoSettings, todoSettingsSchema } from "./types"; + +const SETTINGS_FILE = "todo-agent-settings.json"; + +/** + * Resolve the settings directory without importing `electron`. The TODO + * agent daemon (ELECTRON_RUN_AS_NODE=1) must be able to read the same + * file the main process writes, and `app.getPath("userData")` is not + * available in that context. 
Using the shared SUPERSET_HOME_DIR keeps + * both processes in sync. + * + * Electron's `app` is imported through a try/require so a missing or + * stubbed electron module in the daemon does not crash on module load. + */ +function getLegacyUserDataDir(): string | null { + try { + const required = (Function("return require") as () => NodeRequire)()( + "electron", + ) as typeof import("electron"); + const app = required?.app; + if (app && typeof app.getPath === "function") { + return app.getPath("userData"); + } + return null; + } catch { + return null; + } +} + +function getSettingsDir(): string { + const base = + process.env.SUPERSET_HOME_DIR || path.join(homedir(), SUPERSET_DIR_NAME); + return path.join(base, "todo-agent"); +} + +function getSettingsPath(): string { + const dir = getSettingsDir(); + mkdirSync(dir, { recursive: true }); + const filePath = path.join(dir, SETTINGS_FILE); + // One-shot migration: if the new location is empty but the old + // userData location has a settings file, copy it over so user + // customizations aren't lost when moving to the shared directory. 
+ if (!existsSync(filePath)) { + const legacyBase = getLegacyUserDataDir(); + if (legacyBase) { + const legacyPath = path.join(legacyBase, "todo-agent", SETTINGS_FILE); + if (existsSync(legacyPath)) { + try { + const raw = readFileSync(legacyPath, "utf8"); + writeFileSync(filePath, raw, "utf8"); + } catch { + // best-effort + } + } + } + } + return filePath; +} + +const DEFAULT_SETTINGS: TodoSettings = { + defaultMaxIterations: 10, + defaultMaxWallClockMin: 30, + maxConcurrentTasks: 1, + sessionRetentionDays: 0, + defaultAgentKind: "claude", + defaultClaudeModel: null, + defaultClaudeEffort: null, + defaultCodexModel: null, + defaultCodexEffort: null, + defaultCrushModel: null, +}; + +let cached: TodoSettings | null = null; + +export function getTodoSettings(): TodoSettings { + if (cached) return cached; + const filePath = getSettingsPath(); + if (!existsSync(filePath)) { + cached = { ...DEFAULT_SETTINGS }; + return cached; + } + try { + const raw = JSON.parse(readFileSync(filePath, "utf8")); + cached = todoSettingsSchema.parse(raw); + return cached; + } catch { + cached = { ...DEFAULT_SETTINGS }; + return cached; + } +} + +export function updateTodoSettings(patch: Partial): TodoSettings { + const current = getTodoSettings(); + const next = todoSettingsSchema.parse({ ...current, ...patch }); + cached = next; + writeFileSync(getSettingsPath(), JSON.stringify(next, null, 2), "utf8"); + return next; +} + +/** + * Force-refresh the in-memory cache. The daemon uses this when it + * receives a `settingsChanged` RPC so subsequent `getTodoSettings()` + * calls observe the latest on-disk value written by the main process. 
+ */ +export function invalidateTodoSettingsCache(): void { + cached = null; +} diff --git a/apps/desktop/src/main/todo-agent/supervisor.ts b/apps/desktop/src/main/todo-agent/supervisor.ts new file mode 100644 index 00000000000..5ea996798c3 --- /dev/null +++ b/apps/desktop/src/main/todo-agent/supervisor.ts @@ -0,0 +1,186 @@ +import { mkdirSync, writeFileSync } from "node:fs"; +import path from "node:path"; +import type { SelectTodoSession } from "@superset/local-db"; +import { getTodoDaemonClient } from "main/lib/todo-daemon/client"; +import { getTodoSessionDebugData, todoAgentMainDebug } from "./debug"; +import { getTodoSessionStore, resolveWorktreePath } from "./session-store"; +import { TODO_ARTIFACT_SUBDIR } from "./types"; + +/** + * Main-process façade for the TODO supervisor. + * + * The heavy lifting (spawning `claude -p`, driving the iteration loop, + * parsing stream-json, updating `todo_sessions`) lives in the + * `todo-daemon` process so in-flight sessions survive app restarts — + * see issue #237. This class proxies each public method to the daemon + * over the daemon-client socket. Pure-filesystem helpers + * (`computeArtifactPath`, `prepareArtifacts`) stay in the main process + * because tRPC calls them before the session ever leaves the UI path. 
+ */ +class TodoSupervisor { + computeArtifactPath(params: { + sessionId: string; + workspaceId: string; + }): string { + const worktreePath = resolveWorktreePath(params.workspaceId); + if (!worktreePath) { + throw new Error( + `todo-agent: workspace ${params.workspaceId} has no resolvable path`, + ); + } + return path.join(worktreePath, TODO_ARTIFACT_SUBDIR, params.sessionId); + } + + prepareArtifacts(session: SelectTodoSession): string { + const dir = session.artifactPath; + mkdirSync(dir, { recursive: true }); + writeFileSync(path.join(dir, "goal.md"), renderGoalDoc(session), "utf8"); + return dir; + } + + async start( + sessionId: string, + options?: { fromScheduledWakeup?: boolean }, + ): Promise { + const current = getTodoSessionStore().get(sessionId); + todoAgentMainDebug.info( + "todo-supervisor-start", + { + sessionId, + fromScheduledWakeup: options?.fromScheduledWakeup ?? false, + ...(current ? getTodoSessionDebugData(current) : {}), + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-supervisor-start"], + }, + ); + try { + await getTodoDaemonClient().start({ + sessionId, + fromScheduledWakeup: options?.fromScheduledWakeup, + }); + todoAgentMainDebug.info( + "todo-supervisor-start-success", + { + sessionId, + fromScheduledWakeup: options?.fromScheduledWakeup ?? false, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-supervisor-start-success"], + }, + ); + } catch (error) { + // The tRPC router flips the session to `preparing` before + // fire-and-forgetting us, so a daemon spawn/connect/auth + // failure here would otherwise leave the row stuck in + // `preparing` with no way for the UI to recover. Persist a + // terminal failure state so the user sees the problem and + // can retry or delete the session. + const reason = error instanceof Error ? 
error.message : String(error); + console.warn("[todo-supervisor] daemon start failed", error); + todoAgentMainDebug.captureException( + error, + "todo-supervisor-start-failed", + { + sessionId, + fromScheduledWakeup: options?.fromScheduledWakeup ?? false, + errorMessage: reason, + }, + { + fingerprint: ["todo.agent.main", "todo-supervisor-start-failed"], + }, + ); + try { + const store = getTodoSessionStore(); + const current = store.get(sessionId); + if ( + current && + (current.status === "preparing" || + current.status === "queued" || + current.status === "waiting") + ) { + store.update(sessionId, { + status: "failed", + phase: "failed", + verdictPassed: false, + verdictReason: `todo-daemon を起動できませんでした: ${reason}`, + completedAt: Date.now(), + }); + } + } catch (persistError) { + console.warn( + "[todo-supervisor] failed to persist daemon failure state", + persistError, + ); + } + throw error; + } + } + + abort(sessionId: string): void { + void getTodoDaemonClient() + .abort({ sessionId }) + .catch((error) => { + console.warn("[todo-supervisor] daemon abort failed", error); + }); + } + + queueIntervention(sessionId: string, data: string): void { + void getTodoDaemonClient() + .queueIntervention({ sessionId, data }) + .catch((error) => { + console.warn( + "[todo-supervisor] daemon queueIntervention failed", + error, + ); + }); + } + + handleSettingsChanged(): void { + void getTodoDaemonClient() + .settingsChanged() + .catch((error) => { + console.warn("[todo-supervisor] daemon settingsChanged failed", error); + }); + } +} + +let supervisor: TodoSupervisor | undefined; +export function getTodoSupervisor(): TodoSupervisor { + if (!supervisor) supervisor = new TodoSupervisor(); + return supervisor; +} + +function renderGoalDoc(session: SelectTodoSession): string { + const lines: string[] = [ + session.title ? 
`# TODO: ${session.title}` : "# TODO", + "", + "## やって欲しいこと", + session.description, + "", + "## ゴール(受け入れ条件)", + session.goal?.trim() || + "(未指定。上記『やって欲しいこと』が完了した時点で完了とみなす)", + "", + ]; + if (session.verifyCommand) { + lines.push( + "## Verify コマンド", + "```sh", + session.verifyCommand, + "```", + "", + `予算: ${session.maxIterations} イテレーション / ${session.maxWallClockSec} 秒`, + "", + ); + } else { + lines.push( + "## モード", + "単発タスク。外部 verify は行いません。ゴールを達成したと判断したらターンを終えて停止してください。", + "", + ); + } + return lines.join("\n"); +} diff --git a/apps/desktop/src/main/todo-agent/trpc-router.ts b/apps/desktop/src/main/todo-agent/trpc-router.ts new file mode 100644 index 00000000000..f1b1336485e --- /dev/null +++ b/apps/desktop/src/main/todo-agent/trpc-router.ts @@ -0,0 +1,1092 @@ +import { randomUUID } from "node:crypto"; +import { mkdirSync, readFileSync, rmSync, writeFileSync } from "node:fs"; +import path from "node:path"; +import { todoPromptPresets } from "@superset/local-db"; +import { TRPCError } from "@trpc/server"; +import { observable } from "@trpc/server/observable"; +import { desc, eq } from "drizzle-orm"; +import { app } from "electron"; +import { publicProcedure, router } from "lib/trpc"; +import { localDb } from "main/lib/local-db"; +import { workspaceInitManager } from "main/lib/workspace-init-manager"; +import { z } from "zod"; +import { getTodoSessionDebugData, todoAgentMainDebug } from "./debug"; +import { describeEnhanceFailure, enhanceTodoText } from "./enhance-text"; +import { + getSessionFileDiff, + getSessionGitSnapshot, + type SessionDiffScope, +} from "./git-status"; +import { + readTodoSessionRuntimeConfig, + writeTodoSessionRuntimeConfig, +} from "./runtime-config"; +import { getTodoScheduleStore } from "./schedule-store"; +import { computeNextRunAt, getTodoScheduler } from "./scheduler"; +import { getTodoSessionStore, resolveWorktreePath } from "./session-store"; +import { getTodoSettings, updateTodoSettings } from "./settings"; +import { 
getTodoSupervisor } from "./supervisor"; +import { + TODO_ARTIFACT_SUBDIR, + type TodoScheduleFireEvent, + type TodoSessionStateEvent, + type TodoStreamUpdate, + todoClaudeEffortSchema, + todoClaudeModelSchema, + todoCreateInputSchema, + todoEnhanceTextInputSchema, + todoPresetCreateInputSchema, + todoPresetUpdateInputSchema, + todoScheduleCreateInputSchema, + todoScheduleUpdateInputSchema, + todoSendInputSchema, + todoSettingsUpdateSchema, +} from "./types"; + +/** + * tRPC router for the fork-local TODO autonomous agent feature. + * + * Exposed as `todoAgent.*` on the app router. + */ +interface CrushModelsCache { + values: string[]; + expiresAt: number; + inflight: Promise | null; +} + +const CRUSH_MODELS_CACHE_TTL_MS = 5 * 60_000; +const crushModelsCache: CrushModelsCache = { + values: [], + expiresAt: 0, + inflight: null, +}; + +export const createTodoAgentRouter = () => { + return router({ + create: publicProcedure + .input(todoCreateInputSchema) + .mutation(async ({ input }) => { + todoAgentMainDebug.info( + "todo-create-request", + { + workspaceId: input.workspaceId, + projectId: input.projectId ?? null, + ptyEnabled: input.ptyEnabled, + remoteControlEnabled: input.remoteControlEnabled, + maxIterations: input.maxIterations, + maxWallClockSec: input.maxWallClockSec, + hasVerify: (input.verifyCommand?.trim().length ?? 0) > 0, + hasCustomSystemPrompt: + (input.customSystemPrompt?.trim().length ?? 0) > 0, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-create-request"], + }, + ); + try { + // When the UI creates a fresh workspace+worktree immediately + // before creating the TODO (the "新しい worktree を作成して実行" + // checkbox), `workspaces.create` returns while `git worktree + // add` is still running in the background. 
Materializing the + // artifact directory now would mkdir inside the future + // worktree path, leaving it non-empty and causing the + // subsequent `git worktree add` to fail — the symptom users + // see as the sidebar error + "ブランチ取得中…" that never + // resolves. Block until init is done (or already no-op) so + // prepareArtifacts runs against a real worktree. + // + // `waitForInit` has a 30s internal timeout that resolves + // silently even if init is still running, so a slow + // fetch/clone path could still slip through. Loop on the + // `isInitializing` flag so we really block until the job + // reaches a terminal state, up to a generous ceiling. + const INIT_WAIT_STEP_MS = 30_000; + const INIT_WAIT_CEILING_MS = 10 * 60_000; + const waitStartedAt = Date.now(); + while (workspaceInitManager.isInitializing(input.workspaceId)) { + if (Date.now() - waitStartedAt > INIT_WAIT_CEILING_MS) { + throw new TRPCError({ + code: "TIMEOUT", + message: `todo-agent: workspace ${input.workspaceId} の初期化が時間内に終わりませんでした`, + }); + } + await workspaceInitManager.waitForInit( + input.workspaceId, + INIT_WAIT_STEP_MS, + ); + } + if (workspaceInitManager.hasFailed(input.workspaceId)) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: `todo-agent: workspace ${input.workspaceId} の初期化に失敗しました`, + }); + } + + const store = getTodoSessionStore(); + const worktreePath = resolveWorktreePath(input.workspaceId); + if (!worktreePath) { + throw new Error( + `todo-agent: workspace ${input.workspaceId} のパスを解決できませんでした`, + ); + } + + // Compute the final artifact path up-front so the row is + // inserted with its permanent path in one shot. No more + // half-written PENDING rows left behind if the process + // crashes between insert and update. 
+ const sessionId = randomUUID(); + const supervisor = getTodoSupervisor(); + const artifactPath = supervisor.computeArtifactPath({ + sessionId, + workspaceId: input.workspaceId, + }); + + // Fall through to the user's configured defaults when the + // composer did not pick an explicit model / effort. Null + // at both levels means "use Claude Code's own default" + // (we simply omit the CLI flag at spawn time). + const settings = getTodoSettings(); + const resolvedModel = + input.claudeModel !== undefined + ? input.claudeModel + : (settings.defaultClaudeModel ?? null); + const resolvedEffort = + input.claudeEffort !== undefined + ? input.claudeEffort + : (settings.defaultClaudeEffort ?? null); + const resolvedCodexModel = + input.codexModel !== undefined + ? input.codexModel + : (settings.defaultCodexModel ?? null); + const resolvedCodexEffort = + input.codexEffort !== undefined + ? input.codexEffort + : (settings.defaultCodexEffort ?? null); + const resolvedAgentKind = + input.agentKind ?? settings.defaultAgentKind ?? "claude"; + const resolvedCrushModel = + input.crushModel !== undefined + ? input.crushModel + : (settings.defaultCrushModel ?? null); + + const session = store.insertQueuedFromTemplate({ + id: sessionId, + projectId: input.projectId ?? null, + workspaceId: input.workspaceId, + title: input.title ?? "", + description: input.description, + goal: input.goal, + verifyCommand: input.verifyCommand, + maxIterations: input.maxIterations, + maxWallClockSec: input.maxWallClockSec, + customSystemPrompt: input.customSystemPrompt, + claudeModel: resolvedAgentKind === "claude" ? resolvedModel : null, + claudeEffort: + resolvedAgentKind === "claude" ? resolvedEffort : null, + agentKind: resolvedAgentKind, + codexModel: + resolvedAgentKind === "codex" ? resolvedCodexModel : null, + codexEffort: + resolvedAgentKind === "codex" ? resolvedCodexEffort : null, + crushModel: + resolvedAgentKind === "crush" ? 
resolvedCrushModel : null, + remoteControlEnabled: input.remoteControlEnabled, + artifactPath, + }); + + // Materialize the directory + goal.md. If this throws after + // the row exists the user can delete the broken session + // from the Manager — same as any other filesystem error. + supervisor.prepareArtifacts(session); + writeTodoSessionRuntimeConfig(session.artifactPath, { + ptyEnabled: input.ptyEnabled, + remoteControlEnabled: input.remoteControlEnabled, + }); + + todoAgentMainDebug.info( + "todo-create-request-success", + { + ...getTodoSessionDebugData(session), + ptyEnabled: input.ptyEnabled, + remoteControlEnabled: input.remoteControlEnabled, + claudeModel: resolvedModel, + claudeEffort: resolvedEffort, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-create-request-success"], + }, + ); + return { sessionId: session.id }; + } catch (error) { + todoAgentMainDebug.captureException( + error, + "todo-create-request-failed", + { + workspaceId: input.workspaceId, + projectId: input.projectId ?? null, + ptyEnabled: input.ptyEnabled, + remoteControlEnabled: input.remoteControlEnabled, + }, + { + fingerprint: ["todo.agent.main", "todo-create-request-failed"], + }, + ); + throw error; + } + }), + + list: publicProcedure + .input(z.object({ workspaceId: z.string().min(1) })) + .query(({ input }) => + getTodoSessionStore().listForWorkspace(input.workspaceId), + ), + + // Cross-workspace feed used by the Agent-Manager-style view. 
+ listAll: publicProcedure.query(() => getTodoSessionStore().listAll()), + + enhanceText: publicProcedure + .input(todoEnhanceTextInputSchema) + .mutation(async ({ input }) => { + const { text, attempts } = await enhanceTodoText( + input.text, + input.kind, + ); + if (text === null) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: describeEnhanceFailure(attempts), + }); + } + return { text }; + }), + + get: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .query(({ input }) => getTodoSessionStore().get(input.sessionId)), + + /** + * Kick off the headless claude loop for a queued session. There + * is no pane to attach anymore — the supervisor spawns claude as + * a plain child process in the main process and the Manager + * renders the parsed stream events inline. + */ + start: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .mutation(async ({ input }) => { + const store = getTodoSessionStore(); + const session = store.get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + if ( + session.status !== "queued" && + session.status !== "failed" && + session.status !== "aborted" && + session.status !== "escalated" && + // Allow manual "wake now" on a ScheduleWakeup-paused + // session — the user should not have to wait out the + // delay if they already have the context they wanted. + session.status !== "waiting" && + // Allow resuming a completed session so the user can send + // follow-up messages. The supervisor detects the existing + // `claudeSessionId` and issues `--resume` to continue the + // prior conversation rather than starting fresh. 
+ session.status !== "done" + ) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: `このセッションは既に ${session.status} 状態なので開始できません`, + }); + } + store.update(input.sessionId, { + status: "preparing", + phase: "preparing", + // Clear the ScheduleWakeup parking fields so the row + // reflects an active run rather than a pending wake. + waitingUntil: null, + waitingReason: null, + }); + const runtimeConfig = readTodoSessionRuntimeConfig({ + artifactPath: session.artifactPath, + fallbackRemoteControlEnabled: session.remoteControlEnabled ?? false, + }); + todoAgentMainDebug.info( + "todo-start-request", + { + ...getTodoSessionDebugData(session), + runtimeConfigPtyEnabled: runtimeConfig.ptyEnabled, + runtimeConfigRemoteControlEnabled: + runtimeConfig.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-start-request"], + }, + ); + // Fire-and-forget: the supervisor drives the rest of the loop. + void getTodoSupervisor().start(input.sessionId); + return { ok: true }; + }), + + abort: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .mutation(({ input }) => { + getTodoSupervisor().abort(input.sessionId); + return { ok: true }; + }), + + updateTitle: publicProcedure + .input( + z.object({ + sessionId: z.string().min(1), + title: z.string().trim().min(1).max(200), + }), + ) + .mutation(({ input }) => { + const store = getTodoSessionStore(); + const session = store.get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + store.update(input.sessionId, { title: input.title }); + return { ok: true }; + }), + + /** + * Edit the user-authored fields (description / goal) of a TODO + * session. Allowed in queued / preparing / failed / aborted / + * escalated. `preparing` is safe because the supervisor has + * not spawned Claude yet and `prepareArtifacts` will rewrite + * goal.md before it is read. 
Refused once the session is + * running / verifying so the worker's prompt never mutates + * under its feet. + */ + updateFields: publicProcedure + .input( + z.object({ + sessionId: z.string().min(1), + description: z.string().trim().min(1).max(10_000).optional(), + goal: z + .string() + .trim() + .max(10_000) + .optional() + .transform((v) => (v && v.length > 0 ? v : undefined)), + clearGoal: z.boolean().optional(), + claudeModel: todoClaudeModelSchema.optional(), + clearClaudeModel: z.boolean().optional(), + claudeEffort: todoClaudeEffortSchema.optional(), + clearClaudeEffort: z.boolean().optional(), + }), + ) + .mutation(({ input }) => { + const store = getTodoSessionStore(); + const session = store.get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + if ( + session.status !== "queued" && + session.status !== "preparing" && + session.status !== "failed" && + session.status !== "aborted" && + session.status !== "escalated" && + // Allow editing resumable done sessions so a subsequent + // `--resume` Start picks up the new description / goal / + // model / effort. `done` without a claudeSessionId isn't + // resumable, but `canStart` on the frontend already + // gates that case, and an accidental save here would + // just be a no-op that prepareArtifacts makes durable. 
+ session.status !== "done" + ) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: + "実行中のセッションは編集できません。中断してから再度お試しください。", + }); + } + const patch: { + description?: string; + goal?: string | null; + claudeModel?: string | null; + claudeEffort?: string | null; + } = {}; + if (input.description !== undefined) { + patch.description = input.description; + } + if (input.clearGoal) { + patch.goal = null; + } else if (input.goal !== undefined) { + patch.goal = input.goal; + } + if (input.clearClaudeModel) { + patch.claudeModel = null; + } else if (input.claudeModel !== undefined) { + patch.claudeModel = input.claudeModel; + } + if (input.clearClaudeEffort) { + patch.claudeEffort = null; + } else if (input.claudeEffort !== undefined) { + patch.claudeEffort = input.claudeEffort; + } + const updated = store.update(input.sessionId, patch); + // Rewrite goal.md so a subsequent Start reads the edited + // content from disk (the iteration prompt tells Claude to + // read that file first, so stale on-disk content would + // silently shadow the edit). + if (updated) { + try { + getTodoSupervisor().prepareArtifacts(updated); + } catch (error) { + console.warn("[todo-agent] goal.md rewrite failed", error); + } + } + return { ok: true }; + }), + + delete: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .mutation(({ input }) => { + const store = getTodoSessionStore(); + const session = store.get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + // Best-effort: make sure the supervisor is not still driving + // the session before we wipe its row. abort() is a no-op if + // the session is not currently active. 
+ try { + getTodoSupervisor().abort(input.sessionId); + } catch (error) { + console.warn("[todo-agent] abort-before-delete failed", error); + } + + const removed = store.remove(input.sessionId); + if (!removed) { + throw new TRPCError({ + code: "INTERNAL_SERVER_ERROR", + message: "セッションの削除に失敗しました", + }); + } + + // Best-effort artifact cleanup. Failure to remove the + // directory should not fail the mutation — the DB row is + // already gone and the directory is just scratch data. + try { + const worktreePath = resolveWorktreePath(session.workspaceId); + if (worktreePath) { + const dir = path.join( + worktreePath, + TODO_ARTIFACT_SUBDIR, + session.id, + ); + rmSync(dir, { recursive: true, force: true }); + } + } catch (error) { + console.warn("[todo-agent] artifact cleanup failed", error); + } + + return { ok: true }; + }), + + rerun: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .mutation(({ input }) => { + const store = getTodoSessionStore(); + const source = store.get(input.sessionId); + if (!source) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "元セッションが見つかりません", + }); + } + todoAgentMainDebug.info( + "todo-rerun-request", + getTodoSessionDebugData(source), + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-rerun-request"], + }, + ); + + // Create a brand-new queued session that copies the user- + // authored fields from the source. Verdict / iteration / + // pane attachment are reset so the new session starts + // clean in the Agent Manager. 
+ const nextId = randomUUID(); + const supervisor = getTodoSupervisor(); + const artifactPath = supervisor.computeArtifactPath({ + sessionId: nextId, + workspaceId: source.workspaceId, + }); + + const next = store.insert({ + id: nextId, + projectId: source.projectId, + workspaceId: source.workspaceId, + title: source.title, + description: source.description, + goal: source.goal, + verifyCommand: source.verifyCommand, + maxIterations: source.maxIterations, + maxWallClockSec: source.maxWallClockSec, + status: "queued", + phase: "queued", + iteration: 0, + attachedPaneId: null, + attachedTabId: null, + claudeSessionId: null, + finalAssistantText: null, + totalCostUsd: null, + totalNumTurns: null, + pendingIntervention: null, + startHeadSha: null, + customSystemPrompt: source.customSystemPrompt, + claudeModel: source.claudeModel, + claudeEffort: source.claudeEffort, + agentKind: source.agentKind ?? "claude", + codexModel: source.codexModel, + codexEffort: source.codexEffort, + crushModel: source.crushModel ?? null, + remoteControlEnabled: source.remoteControlEnabled ?? false, + verdictPassed: null, + verdictReason: null, + verdictFailingTest: null, + artifactPath, + waitingUntil: null, + waitingReason: null, + startedAt: null, + completedAt: null, + }); + + supervisor.prepareArtifacts(next); + const runtimeConfig = readTodoSessionRuntimeConfig({ + artifactPath: source.artifactPath, + fallbackRemoteControlEnabled: source.remoteControlEnabled ?? 
false, + }); + writeTodoSessionRuntimeConfig(next.artifactPath, runtimeConfig); + todoAgentMainDebug.info( + "todo-rerun-request-success", + { + sourceSessionId: source.id, + nextSessionId: next.id, + nextArtifactPath: next.artifactPath, + ptyEnabled: runtimeConfig.ptyEnabled, + remoteControlEnabled: runtimeConfig.remoteControlEnabled, + }, + { + captureMessage: true, + fingerprint: ["todo.agent.main", "todo-rerun-request-success"], + }, + ); + + return { sessionId: next.id }; + }), + + /** + * Queue a user intervention for the next turn. Headless mode + * cannot inject text mid-stream, so interventions land at the + * next iteration boundary. + * + * Sending a message to a terminal session (done/failed/aborted/ + * escalated) that still has a `claudeSessionId` auto-resumes the + * conversation: the message is buffered, the row flips to + * `preparing`, and the supervisor reruns with `--resume `. + * Without this, the queued message would sit unread until the + * user manually clicked Start — the exact friction #241 called + * out for the `done` case. + */ + sendInput: publicProcedure + .input(todoSendInputSchema) + .mutation(({ input }) => { + const store = getTodoSessionStore(); + const session = store.get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + const supervisor = getTodoSupervisor(); + supervisor.queueIntervention(input.sessionId, input.data); + + const isTerminal = + session.status === "done" || + session.status === "failed" || + session.status === "aborted" || + session.status === "escalated"; + if (isTerminal && session.claudeSessionId) { + store.update(input.sessionId, { + status: "preparing", + phase: "preparing", + waitingUntil: null, + waitingReason: null, + }); + void supervisor.start(input.sessionId); + } + return { ok: true }; + }), + + /** + * Snapshot of the in-memory stream events buffer for a session. 
+ * Used by the Manager to paint the initial state of the detail + * pane before the subscription takes over. + */ + getStream: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .query(({ input }) => + getTodoSessionStore().getStreamEvents(input.sessionId), + ), + + /** + * Live stream events (assistant text, tool calls, verify results, + * errors) for the selected session. Emits the in-memory tail on + * subscribe then fans out every subsequent append. + */ + /** + * Per-session git snapshot: branch, current vs session-start HEAD, + * commits produced since the session started, working-tree files. + * The right-sidebar in the Manager polls this every few seconds + * while the session is live. + */ + gitSnapshot: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .query(async ({ input }) => { + const session = getTodoSessionStore().get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + const worktreePath = resolveWorktreePath(session.workspaceId); + if (!worktreePath) { + throw new TRPCError({ + code: "PRECONDITION_FAILED", + message: "ワークスペースのパスを解決できませんでした", + }); + } + return getSessionGitSnapshot({ + cwd: worktreePath, + startHeadSha: session.startHeadSha ?? null, + }); + }), + + /** + * Unified diff for a single file at a user-selected scope + * (session-range / staged / unstaged / a specific commit). 
+ */ + gitFileDiff: publicProcedure + .input( + z.object({ + sessionId: z.string().min(1), + path: z.string().min(1), + scope: z.enum(["session", "staged", "unstaged", "commit"]), + commitSha: z.string().optional(), + }), + ) + .query(async ({ input }) => { + const session = getTodoSessionStore().get(input.sessionId); + if (!session) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "セッションが見つかりません", + }); + } + const worktreePath = resolveWorktreePath(session.workspaceId); + if (!worktreePath) return ""; + const diff = await getSessionFileDiff({ + cwd: worktreePath, + startHeadSha: session.startHeadSha ?? null, + path: input.path, + scope: input.scope as SessionDiffScope, + commitSha: input.commitSha, + }); + return diff; + }), + + subscribeStream: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .subscription(({ input }) => { + return observable((emit) => { + const store = getTodoSessionStore(); + const initial = store.getStreamEvents(input.sessionId); + if (initial.length > 0) { + emit.next({ + sessionId: input.sessionId, + events: initial, + }); + } + const unsubscribe = store.subscribeStream(input.sessionId, (update) => + emit.next(update), + ); + return () => unsubscribe(); + }); + }), + + subscribeState: publicProcedure + .input(z.object({ sessionId: z.string().min(1) })) + .subscription(({ input }) => { + return observable((emit) => { + const store = getTodoSessionStore(); + // Emit current state immediately on subscribe. + const current = store.get(input.sessionId); + if (current) { + emit.next({ sessionId: current.id, session: current }); + } + const unsubscribe = store.subscribe(input.sessionId, (event) => { + emit.next(event); + }); + return () => unsubscribe(); + }); + }), + + /** + * CRUD for reusable system-prompt templates the user attaches + * to new TODO sessions. Managed from the Agent Manager's + * Settings panel. 
+ */ + presets: router({ + list: publicProcedure.query(() => + localDb + .select() + .from(todoPromptPresets) + .orderBy(desc(todoPromptPresets.updatedAt)) + .all(), + ), + create: publicProcedure + .input(todoPresetCreateInputSchema) + .mutation(({ input }) => { + const now = Date.now(); + const row = localDb + .insert(todoPromptPresets) + .values({ + name: input.name, + content: input.content, + kind: input.kind, + workspaceId: input.workspaceId ?? null, + createdAt: now, + updatedAt: now, + }) + .returning() + .get(); + return row; + }), + update: publicProcedure + .input(todoPresetUpdateInputSchema) + .mutation(({ input }) => { + const patch: { + name: string; + content: string; + updatedAt: number; + kind?: "system" | "description" | "goal"; + workspaceId?: string | null; + } = { + name: input.name, + content: input.content, + updatedAt: Date.now(), + }; + if (input.kind !== undefined) patch.kind = input.kind; + if (input.workspaceId !== undefined) + patch.workspaceId = input.workspaceId; + const row = localDb + .update(todoPromptPresets) + .set(patch) + .where(eq(todoPromptPresets.id, input.id)) + .returning() + .get(); + if (!row) { + throw new TRPCError({ + code: "NOT_FOUND", + message: "プリセットが見つかりません", + }); + } + return row; + }), + delete: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .mutation(({ input }) => { + const result = localDb + .delete(todoPromptPresets) + .where(eq(todoPromptPresets.id, input.id)) + .run(); + return { ok: result.changes > 0 }; + }), + }), + + /** + * Save a pasted/dropped image (or any binary) to disk and return + * its absolute path. Used by the composer/intervention textareas + * to let the user embed screenshots via paste or drag-and-drop. + * The returned path can be referenced from the Claude prompt as + * a markdown image (`![](path)`) — Claude's Read tool opens it. 
+ */ + saveAttachment: publicProcedure + .input( + z.object({ + fileName: z.string().min(1).max(200), + mimeType: z.string().min(1).max(120), + // Hard cap ~15MB raw binary (= ~20MB base64 chars). + // Client-side paste handler enforces a 10MB ceiling; + // this larger server bound absorbs rounding + encoding + // overhead while still blocking absurd paste payloads + // before they hit the tRPC channel. + dataBase64: z + .string() + .min(1) + .max(20 * 1024 * 1024), + }), + ) + .mutation(({ input }) => { + const dir = path.join( + app.getPath("userData"), + "todo-agent", + "attachments", + ); + mkdirSync(dir, { recursive: true }); + const extFromName = path.extname(input.fileName).toLowerCase(); + const extFromMime = + input.mimeType === "image/png" + ? ".png" + : input.mimeType === "image/jpeg" || input.mimeType === "image/jpg" + ? ".jpg" + : input.mimeType === "image/gif" + ? ".gif" + : input.mimeType === "image/webp" + ? ".webp" + : ""; + const ext = extFromName || extFromMime || ".bin"; + const safeName = input.fileName.replace(/[^\w.-]/g, "_").slice(0, 80); + const filename = `${randomUUID()}-${safeName}${ + safeName.toLowerCase().endsWith(ext) ? "" : ext + }`; + const filePath = path.join(dir, filename); + const buf = Buffer.from(input.dataBase64, "base64"); + writeFileSync(filePath, buf); + return { path: filePath }; + }), + + /** + * Read an image attachment back from disk so the renderer can + * preview it inline. Restricted to the saveAttachment output + * directory to prevent the renderer from coercing the main + * process into reading arbitrary user files via this channel. + */ + readAttachment: publicProcedure + .input(z.object({ path: z.string().min(1).max(4096) })) + .query(({ input }) => { + const dir = path.resolve( + path.join(app.getPath("userData"), "todo-agent", "attachments"), + ); + const resolved = path.resolve(input.path); + const dirPrefix = dir.endsWith(path.sep) ? 
dir : dir + path.sep; + if (!resolved.startsWith(dirPrefix)) { + throw new TRPCError({ + code: "FORBIDDEN", + message: "添付ディレクトリ外のパスは読み取れません", + }); + } + let buf: Buffer; + try { + buf = readFileSync(resolved); + } catch (error) { + throw new TRPCError({ + code: "NOT_FOUND", + message: + error instanceof Error + ? `添付ファイルを読めませんでした: ${error.message}` + : "添付ファイルを読めませんでした", + }); + } + const ext = path.extname(resolved).toLowerCase(); + const mimeType = + ext === ".png" + ? "image/png" + : ext === ".jpg" || ext === ".jpeg" + ? "image/jpeg" + : ext === ".gif" + ? "image/gif" + : ext === ".webp" + ? "image/webp" + : ext === ".svg" + ? "image/svg+xml" + : "application/octet-stream"; + return { + mimeType, + dataBase64: buf.toString("base64"), + byteLength: buf.byteLength, + }; + }), + + settings: router({ + get: publicProcedure.query(() => getTodoSettings()), + update: publicProcedure + .input(todoSettingsUpdateSchema) + .mutation(({ input }) => { + const next = updateTodoSettings(input); + // Nudge the supervisor so a raised `maxConcurrentTasks` + // immediately releases queued sessions. Without this, a + // bump from 1 → N leaves already-pending tasks waiting + // until the currently running session finishes. + getTodoSupervisor().handleSettingsChanged(); + return next; + }), + }), + + schedule: router({ + list: publicProcedure + .input(z.object({ projectId: z.string().min(1) })) + .query(({ input }) => + getTodoScheduleStore().listForProject(input.projectId), + ), + listAll: publicProcedure.query(() => getTodoScheduleStore().listAll()), + create: publicProcedure + .input(todoScheduleCreateInputSchema) + .mutation(({ input }) => { + const nextRunAt = input.enabled + ? computeNextRunAt( + { + frequency: input.frequency, + minute: input.minute ?? null, + hour: input.hour ?? null, + weekday: input.weekday ?? null, + monthday: input.monthday ?? null, + cronExpr: input.cronExpr ?? 
null, + }, + new Date(), + ) + : null; + const row = getTodoScheduleStore().insert({ + ...input, + nextRunAt, + }); + return row; + }), + update: publicProcedure + .input(todoScheduleUpdateInputSchema) + .mutation(({ input }) => { + const row = getTodoScheduleStore().update(input); + if (row) { + getTodoScheduler().refreshNextRunAt(row.id); + } + return row ?? null; + }), + setEnabled: publicProcedure + .input( + z.object({ + id: z.string().min(1), + enabled: z.boolean(), + }), + ) + .mutation(({ input }) => { + const row = getTodoScheduleStore().setEnabled( + input.id, + input.enabled, + ); + if (row) { + getTodoScheduler().refreshNextRunAt(row.id); + } + return row ?? null; + }), + delete: publicProcedure + .input(z.object({ id: z.string().min(1) })) + .mutation(({ input }) => { + const ok = getTodoScheduleStore().delete(input.id); + return { ok }; + }), + previewNextRun: publicProcedure + .input( + z.object({ + frequency: z.enum([ + "hourly", + "daily", + "weekly", + "monthly", + "custom", + ]), + minute: z.number().int().min(0).max(59).nullish(), + hour: z.number().int().min(0).max(23).nullish(), + weekday: z.number().int().min(0).max(6).nullish(), + monthday: z.number().int().min(1).max(31).nullish(), + cronExpr: z.string().trim().max(200).nullish(), + }), + ) + .query(({ input }) => + computeNextRunAt( + { + frequency: input.frequency, + minute: input.minute ?? null, + hour: input.hour ?? null, + weekday: input.weekday ?? null, + monthday: input.monthday ?? null, + cronExpr: input.cronExpr ?? 
null, + }, + new Date(), + ), + ), + onFire: publicProcedure.subscription(() => + observable((emit) => { + const off = getTodoScheduleStore().onFire((event) => { + emit.next(event); + }); + return () => { + off(); + }; + }), + ), + }), + crushModels: publicProcedure.query(async () => { + const now = Date.now(); + if (crushModelsCache.expiresAt > now) { + return crushModelsCache.values; + } + if (crushModelsCache.inflight) { + return crushModelsCache.inflight; + } + + crushModelsCache.inflight = (async () => { + const { execFile } = await import("node:child_process"); + const { promisify } = await import("node:util"); + const execFileAsync = promisify(execFile); + const bin = + process.env.TODO_CRUSH_BIN || process.env.CRUSH_BIN || "crush"; + try { + const { stdout } = await execFileAsync(bin, ["models"], { + timeout: 10_000, + }); + const values = stdout + .trim() + .split("\n") + .map((l) => l.trim()) + .filter(Boolean); + crushModelsCache.values = values; + crushModelsCache.expiresAt = Date.now() + CRUSH_MODELS_CACHE_TTL_MS; + return values; + } catch { + return crushModelsCache.values.length > 0 + ? crushModelsCache.values + : []; + } finally { + crushModelsCache.inflight = null; + } + })(); + return crushModelsCache.inflight; + }), + }); +}; + +export type TodoAgentRouter = ReturnType; diff --git a/apps/desktop/src/main/todo-agent/types.ts b/apps/desktop/src/main/todo-agent/types.ts new file mode 100644 index 00000000000..96613ed375c --- /dev/null +++ b/apps/desktop/src/main/todo-agent/types.ts @@ -0,0 +1,459 @@ +import type { + SelectTodoSchedule, + SelectTodoSession, + TodoScheduleFrequency, + TodoScheduleOverlapMode, +} from "@superset/local-db"; +import { z } from "zod"; + +/** + * Row shape returned by the cross-workspace `todoAgent.listAll` query: + * the session fields + the joined workspace / project names so the + * Agent-Manager view can group and label rows without N+1 queries. 
+ */ +export interface TodoSessionListEntry extends SelectTodoSession { + workspaceName: string | null; + workspaceBranch: string | null; + projectName: string | null; +} + +// ---- Agent kind ---- + +export const AGENT_KIND_OPTIONS = ["claude", "codex", "crush"] as const; +export type AgentKind = (typeof AGENT_KIND_OPTIONS)[number]; +export const agentKindSchema = z.enum(AGENT_KIND_OPTIONS); +export const DEFAULT_AGENT_KIND: AgentKind = "claude"; + +/** + * Codex CLI `--model` values we allow the user to pick from the UI. + * Codex uses OpenAI model identifiers directly. Kept open-ended (plus a + * default `null` in the storage layer) so new models do not require a + * migration. `default` is the UI-side sentinel that maps to `null` (don't + * pass `--model` at all; let Codex use whatever the user's own config chose). + */ +export const CODEX_MODEL_OPTIONS = [ + "gpt-5.4", + "gpt-5.2-codex", + "gpt-5.1-codex-max", + "gpt-5.4-mini", + "gpt-5.3-codex", + "gpt-5.3-codex-spark", + "gpt-5.2", + "gpt-5.1-codex-mini", +] as const; + +export type TodoCodexModel = (typeof CODEX_MODEL_OPTIONS)[number]; + +export const todoCodexModelSchema = z.enum(CODEX_MODEL_OPTIONS); + +/** + * Codex CLI `model_reasoning_effort` config values. Mirrors the Codex Rust + * source `ReasoningEffort` enum: none / minimal / low / medium / high / xhigh. + * UI-side sentinel `__default__` maps to `null` (don't override). + */ +export const CODEX_EFFORT_OPTIONS = [ + "none", + "minimal", + "low", + "medium", + "high", + "xhigh", +] as const; + +export type TodoCodexEffort = (typeof CODEX_EFFORT_OPTIONS)[number]; + +export const todoCodexEffortSchema = z.enum(CODEX_EFFORT_OPTIONS); + +// ---- Claude Code model / effort options ---- + +/** + * Claude Code `--model` values we allow the user to pick from the UI. + * Aliases cover "latest of this tier"; full model names pin a specific + * release. Kept open-ended (plus a default `null` in the storage layer) + * so new models do not require a migration. 
`default` is the UI-side + * sentinel that maps to `null` (don't pass `--model` at all; let Claude + * Code use whatever the user's own config / ~/.claude.json chose). + */ +export const CLAUDE_MODEL_OPTIONS = [ + "opus", + "sonnet", + "haiku", + "claude-opus-4-7", + "claude-sonnet-4-6", + "claude-haiku-4-5-20251001", +] as const; + +export type TodoClaudeModel = (typeof CLAUDE_MODEL_OPTIONS)[number]; + +export const todoClaudeModelSchema = z.enum(CLAUDE_MODEL_OPTIONS); + +/** + * Claude Code `--effort` levels. `default` is the UI-side sentinel for + * "don't pass the flag"; actual persisted values are `low`..`max` or + * null. + * + * Thinking support is model-gated in Claude Code; the CLI rejects an + * incompatible effort level at launch. We intentionally don't duplicate + * that matrix here so adding a new model tier on the CLI side doesn't + * require a fork update. The UI surfaces a warning but allows the + * combination; the supervisor forwards whatever the user picked. + */ +export const CLAUDE_EFFORT_OPTIONS = [ + "low", + "medium", + "high", + "xhigh", + "max", +] as const; + +export type TodoClaudeEffort = (typeof CLAUDE_EFFORT_OPTIONS)[number]; + +export const todoClaudeEffortSchema = z.enum(CLAUDE_EFFORT_OPTIONS); + +export const todoCreateInputSchema = z.object({ + workspaceId: z.string().min(1), + projectId: z.string().optional(), + title: z.string().trim().max(200).optional(), + description: z.string().min(1).max(10_000), + // Optional: when omitted, the session treats "やって欲しいこと + // (description) が完了したとき" as the implicit goal. + goal: z + .string() + .trim() + .max(10_000) + .optional() + .transform((v) => (v && v.length > 0 ? v : undefined)), + // Optional: when omitted, the session runs as a single-turn task + // (research / investigation / one-shot). When provided, it is the + // decisive gate for the iteration loop. + verifyCommand: z + .string() + .trim() + .max(10_000) + .optional() + .transform((v) => (v && v.length > 0 ? 
v : undefined)), + maxIterations: z.number().int().min(1).max(100).default(10), + maxWallClockSec: z + .number() + .int() + .min(60) + .max(60 * 60 * 4) + .default(1800), + // Optional free-form text the user attached at creation time, + // usually pulled from a saved preset. For Claude Code sessions, + // passed via `--append-system-prompt`. For Codex sessions, passed + // via `--developer-instructions`. + customSystemPrompt: z + .string() + .trim() + .max(20_000) + .optional() + .transform((v) => (v && v.length > 0 ? v : undefined)), + // Which agent CLI to use for this session. When omitted/undefined, + // the tRPC router resolves from the user's configured default. + agentKind: agentKindSchema.optional(), + // Optional per-session Claude Code CLI overrides. Null / undefined + // means "use the user's configured default" (see todoSettingsSchema). + claudeModel: todoClaudeModelSchema.nullish(), + claudeEffort: todoClaudeEffortSchema.nullish(), + // Optional per-session Codex CLI overrides. Null / undefined means + // "use the user's configured default". Only read when agentKind is + // "codex"; ignored for Claude sessions. + codexModel: todoCodexModelSchema.nullish(), + codexEffort: todoCodexEffortSchema.nullish(), + // Optional per-session Crush CLI model override. Null / undefined means + // "use the user's configured default". Only read when agentKind is + // "crush"; ignored for Claude / Codex sessions. The value is a free-form + // string in the form "provider/model" (e.g. "openai/gpt-5.4") resolved + // dynamically from `crush models`. No effort option — Crush CLI lacks one. + crushModel: z.string().trim().max(200).nullish(), + // Beta escape hatch: opt a single TODO into the interactive PTY + // engine without flipping the whole app over from headless `-p`. + // Persisted in the artifact runtime config, not the DB row. + // Claude Code only — Codex always uses headless exec mode. 
+ ptyEnabled: z.boolean().optional().default(false), + // When true, the PTY runner sends `/remote-control` after spawn so + // the session becomes reachable from claude.ai/code / Claude mobile. + // Requires `ptyEnabled=true`; the UI prevents invalid combinations. + // Claude Code only. + remoteControlEnabled: z.boolean().optional().default(false), +}); + +export const todoPresetKindSchema = z.enum(["system", "description", "goal"]); +export type TodoPresetKind = z.infer; + +export const todoPresetCreateInputSchema = z.object({ + name: z.string().trim().min(1).max(120), + content: z.string().trim().min(1).max(20_000), + kind: todoPresetKindSchema.default("system"), + workspaceId: z.string().min(1).optional(), +}); + +export const todoPresetUpdateInputSchema = z.object({ + id: z.string().min(1), + name: z.string().trim().min(1).max(120), + content: z.string().trim().min(1).max(20_000), + kind: todoPresetKindSchema.optional(), + workspaceId: z.string().min(1).nullable().optional(), +}); + +export const todoEnhanceTextInputSchema = z.object({ + text: z.string().trim().min(1).max(10_000), + kind: z.enum(["description", "goal"]), +}); + +export type TodoEnhanceTextInput = z.infer; + +export type TodoCreateInput = z.infer; + +export const todoSettingsSchema = z.object({ + defaultMaxIterations: z.number().int().min(1).max(100).default(10), + defaultMaxWallClockMin: z.number().int().min(1).max(240).default(30), + maxConcurrentTasks: z.number().int().min(1).max(10).default(1), + // 0 = 無制限 (手動削除のみ). 1-365 = その日数より古い終了済み + // セッションを起動時に自動削除する (queued / running / paused は対象外)。 + sessionRetentionDays: z.number().int().min(0).max(365).default(0), + // Default agent CLI for new TODO sessions. + defaultAgentKind: agentKindSchema.default(DEFAULT_AGENT_KIND), + // Global defaults used when the TODO composer / ScheduleEditor does + // not override them. Null = let Claude Code resolve its own default + // (user config cascade). 
Stored as nullable so the user can pick + // "default" in the settings UI. + defaultClaudeModel: todoClaudeModelSchema.nullish().default(null), + defaultClaudeEffort: todoClaudeEffortSchema.nullish().default(null), + // Global defaults for Codex sessions. + defaultCodexModel: todoCodexModelSchema.nullish().default(null), + defaultCodexEffort: todoCodexEffortSchema.nullish().default(null), + // Global default for Crush sessions. Free-form string ("provider/model"). + defaultCrushModel: z.string().trim().max(200).nullish().default(null), +}); + +export type TodoSettings = z.infer; + +export const todoSettingsUpdateSchema = todoSettingsSchema.partial(); + +export const todoAttachPaneInputSchema = z.object({ + sessionId: z.string().min(1), + tabId: z.string().min(1), + paneId: z.string().min(1), +}); + +export type TodoAttachPaneInput = z.infer; + +export const todoSendInputSchema = z.object({ + sessionId: z.string().min(1), + data: z.string().min(1), +}); + +export type TodoSendInput = z.infer; + +/** + * Event published on state changes so the tRPC subscription can fan out to + * the renderer. Kept small and serializable. + */ +export interface TodoSessionStateEvent { + sessionId: string; + session: SelectTodoSession; +} + +export type TodoSessionPhase = + | "queued" + | "preparing" + | "running" + | "verifying" + | "done" + | "failed" + | "escalated" + | "aborted" + | "paused" + | "waiting"; + +export const TODO_ARTIFACT_SUBDIR = ".superset/todo"; + +// ---- Headless stream-json events ---- +// +// These types describe the NDJSON messages Claude Code emits on stdout when +// invoked with `-p --output-format stream-json`. We do not attempt to cover +// the full schema; we only name the shapes the TODO supervisor needs to +// reason about. Unknown event types fall through as the base `raw` variant. 
+// See: https://code.claude.com/docs/en/headless + +export type TodoStreamEventKind = + | "system_init" + | "assistant_text" + | "tool_use" + | "tool_result" + | "result" + | "error" + | "raw" + // PTY engine (`TODO_ENGINE=pty`) emits these when Remote Control is + // enabled on the session. `remote_control` carries the connection URL + // (`https://claude.ai/code/session_...`) the UI surfaces as a badge; + // `remote_control_error` is non-fatal — the turn continues without RC. + // See apps/desktop/plans/20260417-todo-agent-remote-control.md. + | "remote_control" + | "remote_control_error"; + +/** + * One condensed event we store in the per-session in-memory buffer and send + * over the subscription. Raw NDJSON is kept for the `raw` variant so the UI + * can always show unparsed context for debugging. + */ +export interface TodoStreamEvent { + /** Stable id so React can key on it without re-rendering siblings. */ + id: string; + /** Millisecond timestamp when the event was observed by the supervisor. */ + ts: number; + /** Turn number this event belongs to (1-based, bumped on each iteration). */ + iteration: number; + kind: TodoStreamEventKind; + /** One-line label used by the renderer (e.g. "User", "Claude", "Bash"). */ + label: string; + /** Human-readable body text, already stripped of ANSI. */ + text: string; + /** Optional raw payload for the "raw" / debug kind. */ + raw?: unknown; + /** + * The Anthropic tool-use block id this event corresponds to. + * - For `tool_use` events: the id of the tool_use content block. + * - For `tool_result` events: the `tool_use_id` the result answers. + * Lets the UI pair tool_use ↔ tool_result by id instead of position, + * which is robust to concurrent / out-of-order SDK emissions. + */ + toolUseId?: string; + /** + * Set on messages emitted from inside a subagent's context (i.e. when + * the main session invoked the `Task`/`Agent` tool). Its value is the + * tool_use id of the parent Agent tool call. 
The UI uses this to nest + * sub-tool activity under the parent Agent card, matching the VSCode + * Claude Code extension's presentation. + * See: https://docs.claude.com/en/docs/agent-sdk/ (Subagents) + */ + parentToolUseId?: string; +} + +export interface TodoStreamUpdate { + sessionId: string; + events: TodoStreamEvent[]; +} + +// ---- Schedules ---- + +export const todoScheduleFrequencySchema = z.enum([ + "hourly", + "daily", + "weekly", + "monthly", + "custom", +]); + +export const todoScheduleOverlapModeSchema = z.enum(["skip", "queue"]); + +export const todoScheduleCreateInputSchema = z + .object({ + projectId: z.string().min(1), + // Null/omitted means "run on the project's main repo path" (the + // non-worktree source tree). Set to a workspace id to bind the + // schedule to a specific worktree instead. + workspaceId: z.string().min(1).nullish(), + name: z.string().trim().min(1).max(120), + enabled: z.boolean().default(true), + frequency: todoScheduleFrequencySchema, + minute: z.number().int().min(0).max(59).nullish(), + hour: z.number().int().min(0).max(23).nullish(), + weekday: z.number().int().min(0).max(6).nullish(), + monthday: z.number().int().min(1).max(31).nullish(), + cronExpr: z.string().trim().min(1).max(200).nullish(), + title: z.string().trim().min(1).max(200), + description: z.string().trim().min(1).max(10_000), + goal: z.string().trim().max(10_000).nullish(), + verifyCommand: z.string().trim().max(10_000).nullish(), + maxIterations: z.number().int().min(1).max(100).default(10), + maxWallClockSec: z + .number() + .int() + .min(60) + .max(60 * 60 * 4) + .default(1800), + customSystemPrompt: z.string().trim().max(20_000).nullish(), + claudeModel: todoClaudeModelSchema.nullish(), + claudeEffort: todoClaudeEffortSchema.nullish(), + overlapMode: todoScheduleOverlapModeSchema.default("skip"), + autoSyncBeforeFire: z.boolean().default(false), + }) + .refine( + (v) => + v.frequency !== "custom" || + (typeof v.cronExpr === "string" && 
v.cronExpr.length > 0), + { + message: "cronExpr is required when frequency is 'custom'", + path: ["cronExpr"], + }, + ); + +export type TodoScheduleCreateInput = z.infer< + typeof todoScheduleCreateInputSchema +>; + +const todoScheduleBaseSchema = z.object({ + projectId: z.string().min(1), + workspaceId: z.string().min(1).nullish(), + name: z.string().trim().min(1).max(120), + enabled: z.boolean(), + frequency: todoScheduleFrequencySchema, + minute: z.number().int().min(0).max(59).nullish(), + hour: z.number().int().min(0).max(23).nullish(), + weekday: z.number().int().min(0).max(6).nullish(), + monthday: z.number().int().min(1).max(31).nullish(), + cronExpr: z.string().trim().min(1).max(200).nullish(), + title: z.string().trim().min(1).max(200), + description: z.string().trim().min(1).max(10_000), + goal: z.string().trim().max(10_000).nullish(), + verifyCommand: z.string().trim().max(10_000).nullish(), + maxIterations: z.number().int().min(1).max(100), + maxWallClockSec: z + .number() + .int() + .min(60) + .max(60 * 60 * 4), + customSystemPrompt: z.string().trim().max(20_000).nullish(), + claudeModel: todoClaudeModelSchema.nullish(), + claudeEffort: todoClaudeEffortSchema.nullish(), + overlapMode: todoScheduleOverlapModeSchema, + autoSyncBeforeFire: z.boolean(), +}); + +// projectId is intentionally omitted from the update surface: a schedule's +// project is immutable, otherwise `lastRunSessionId` could point at a +// session from a different project than the schedule currently belongs to. +// Users who want to move a schedule to another project should recreate it. +export const todoScheduleUpdateInputSchema = todoScheduleBaseSchema + .omit({ projectId: true }) + .partial() + .extend({ id: z.string().min(1) }); + +export type TodoScheduleUpdateInput = z.infer< + typeof todoScheduleUpdateInputSchema +>; + +/** + * Event emitted by the scheduler when a schedule fires. 
The renderer uses + * this to show a toast and, when `sessionId` is non-null, deep-link to the + * freshly-created session. + */ +export type TodoScheduleFireKind = "triggered" | "skipped" | "failed"; + +export interface TodoScheduleFireEvent { + scheduleId: string; + scheduleName: string; + kind: TodoScheduleFireKind; + sessionId: string | null; + message: string | null; + firedAt: number; +} + +export type { + SelectTodoSchedule, + TodoScheduleFrequency, + TodoScheduleOverlapMode, +}; diff --git a/apps/desktop/src/main/todo-daemon/codex-turn-runner.ts b/apps/desktop/src/main/todo-daemon/codex-turn-runner.ts new file mode 100644 index 00000000000..da0c425f6f3 --- /dev/null +++ b/apps/desktop/src/main/todo-daemon/codex-turn-runner.ts @@ -0,0 +1,514 @@ +import { type ChildProcess, spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import { getTodoSessionStore } from "main/todo-agent/session-store"; +import { + CODEX_EFFORT_OPTIONS, + CODEX_MODEL_OPTIONS, + type TodoStreamEventKind, +} from "main/todo-agent/types"; + +/** + * Codex CLI (`codex exec`) turn runner. + * + * Runs a single Codex iteration via `codex exec --json --full-auto` and + * parses the NDJSON event stream emitted on stdout. Structured events are + * classified into the same `TodoStreamEvent` shape the Claude Code runner + * uses so the supervisor engine and UI can remain agent-agnostic. + * + * Session resume is supported via `codex exec resume `. 
+ * + * See: https://developers.openai.com/codex/noninteractive + * Source: github.com/openai/codex/codex-rs/exec/src/exec_events.rs + */ + +export interface CodexTurnParams { + sessionId: string; + iteration: number; + cwd: string; + prompt: string; + resumeThreadId: string | null; + customSystemPrompt: string | null; + codexModel: string | null; + codexEffort: string | null; + signal: AbortSignal; + onChild: (child: ChildProcess) => void; +} + +export interface CodexTurnResult { + result: string | null; + threadId: string | null; + costUsd: number | null; + numTurns: number | null; + error: string | null; + interrupted: boolean; +} + +const CODEX_BIN = + process.env.TODO_CODEX_BIN || process.env.CODEX_BIN || "codex"; + +export async function runCodexTurn( + params: CodexTurnParams, +): Promise { + const args = buildArgs(params); + + let child: ChildProcess; + try { + child = spawn(CODEX_BIN, args, { + cwd: params.cwd, + env: { + ...process.env, + // Ensure Codex uses the workspace cwd. + }, + detached: process.platform !== "win32", + }); + } catch (error) { + return { + result: null, + threadId: null, + costUsd: null, + numTurns: null, + error: + error instanceof Error + ? 
`codex を起動できませんでした: ${error.message}` + : "codex を起動できませんでした", + interrupted: false, + }; + } + + params.onChild(child); + + let threadId: string | null = null; + let resultText: string | null = null; + let numTurns: number | null = null; + let errorText: string | null = null; + let stdoutBuffer = ""; + let stderrBuffer = ""; + let settled = false; + let interruptedForIntervention = false; + + const onAbort = () => { + if (child.pid) { + killProcessTree(child.pid, "SIGINT"); + } + }; + params.signal.addEventListener("abort", onAbort); + + const interventionPoll = setInterval(() => { + if (settled || params.signal.aborted) { + clearInterval(interventionPoll); + return; + } + const live = getTodoSessionStore().get(params.sessionId); + if (live?.pendingIntervention?.trim()) { + interruptedForIntervention = true; + clearInterval(interventionPoll); + appendRawEvent( + params.sessionId, + params.iteration, + "system_init", + "介入", + "ユーザ介入を検知。現在のターンを中断して介入内容で再開します…", + ); + try { + if (child.pid) { + killProcessTree(child.pid, "SIGINT"); + } else { + child.kill("SIGINT"); + } + } catch { + // ignore + } + } + }, 500); + + return new Promise((resolve) => { + const settle = () => { + if (settled) return; + settled = true; + clearInterval(interventionPoll); + params.signal.removeEventListener("abort", onAbort); + if (stdoutBuffer.trim().length > 0) { + handleLine(stdoutBuffer.trim()); + stdoutBuffer = ""; + } + resolve({ + result: resultText, + threadId, + costUsd: null, + numTurns, + error: interruptedForIntervention ? 
null : errorText, + interrupted: interruptedForIntervention, + }); + }; + + const drainLines = (chunk: string) => { + stdoutBuffer += chunk; + let newlineIdx = stdoutBuffer.indexOf("\n"); + while (newlineIdx !== -1) { + const line = stdoutBuffer.slice(0, newlineIdx).trim(); + stdoutBuffer = stdoutBuffer.slice(newlineIdx + 1); + if (line.length > 0) { + handleLine(line); + } + newlineIdx = stdoutBuffer.indexOf("\n"); + } + }; + + const handleLine = (line: string) => { + let payload: unknown; + try { + payload = JSON.parse(line); + } catch { + appendRawEvent( + params.sessionId, + params.iteration, + "raw", + "raw", + line.slice(0, 600), + ); + return; + } + const parsed = classifyCodexEvent(payload); + if (parsed.threadId && !threadId) { + threadId = parsed.threadId; + } + if (parsed.resultText) { + resultText = parsed.resultText; + } + if (parsed.numTurns != null) { + numTurns = parsed.numTurns; + } + if (parsed.errorText && !errorText) { + errorText = parsed.errorText; + } + for (const evt of parsed.events) { + getTodoSessionStore().appendStreamEvents(params.sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration: params.iteration, + kind: evt.kind, + label: evt.label, + text: evt.text, + }, + ]); + } + }; + + child.stdout?.setEncoding("utf8"); + child.stdout?.on("data", (chunk: string) => { + drainLines(chunk); + }); + child.stderr?.setEncoding("utf8"); + child.stderr?.on("data", (chunk: string) => { + stderrBuffer += chunk; + if (stderrBuffer.length > 16_000) { + stderrBuffer = stderrBuffer.slice(-16_000); + } + }); + + child.on("error", (err) => { + if (!errorText) { + errorText = `codex プロセスエラー: ${err.message}`; + } + settle(); + }); + child.on("close", (code) => { + if (code !== 0 && !resultText && !errorText) { + const tail = stderrBuffer.trim().split("\n").slice(-6).join("\n"); + errorText = `codex が exit code ${code} で終了しました${ + tail ? 
`:\n${tail}` : "" + }`; + } + settle(); + }); + }); +} + +// ---- Arg builder ---- + +function buildArgs(params: CodexTurnParams): string[] { + const args: string[] = []; + + if (params.resumeThreadId) { + args.push("exec", "resume", params.resumeThreadId); + } else { + args.push("exec"); + } + + args.push("--json"); + args.push("--full-auto"); + args.push("--sandbox", "workspace-write"); + + if (params.customSystemPrompt) { + args.push("--developer-instructions", params.customSystemPrompt); + } + + if ( + params.codexModel && + (CODEX_MODEL_OPTIONS as readonly string[]).includes(params.codexModel) + ) { + args.push("--model", params.codexModel); + } else if (params.codexModel) { + console.warn( + "[todo-daemon:codex] ignoring unknown codexModel:", + params.codexModel, + ); + } + + if ( + params.codexEffort && + (CODEX_EFFORT_OPTIONS as readonly string[]).includes(params.codexEffort) + ) { + args.push("--config", `model_reasoning_effort=${params.codexEffort}`); + } else if (params.codexEffort) { + console.warn( + "[todo-daemon:codex] ignoring unknown codexEffort:", + params.codexEffort, + ); + } + + args.push(params.prompt); + + return args; +} + +// ---- Codex event classifier ---- + +interface ClassifiedEvent { + kind: TodoStreamEventKind; + label: string; + text: string; +} + +interface ClassifiedCodexLine { + threadId: string | null; + resultText: string | null; + numTurns: number | null; + errorText: string | null; + events: ClassifiedEvent[]; +} + +function classifyCodexEvent(payload: unknown): ClassifiedCodexLine { + const empty: ClassifiedCodexLine = { + threadId: null, + resultText: null, + numTurns: null, + errorText: null, + events: [], + }; + if (typeof payload !== "object" || payload === null) return empty; + const rec = payload as Record; + const type = typeof rec.type === "string" ? (rec.type as string) : ""; + + if (type === "thread.started") { + const threadId = + typeof rec.thread_id === "string" ? 
(rec.thread_id as string) : null; + return { + ...empty, + threadId, + events: [ + { + kind: "system_init", + label: "init", + text: `thread ${threadId ?? "?"} 準備完了`, + }, + ], + }; + } + + if (type === "turn.started") { + return empty; + } + + if (type === "turn.completed") { + const usage = rec.usage as + | { input_tokens?: number; output_tokens?: number } + | undefined; + const tokens = usage + ? `${usage.input_tokens ?? 0} in / ${usage.output_tokens ?? 0} out` + : ""; + return { + ...empty, + numTurns: 1, + events: [ + { + kind: "result", + label: "turn completed", + text: tokens ? `ターン完了 (${tokens})` : "ターン完了", + }, + ], + }; + } + + if (type === "turn.failed") { + const error = rec.error as { message?: string } | undefined; + const msg = error?.message ?? "不明なエラー"; + return { + ...empty, + errorText: msg, + events: [{ kind: "error", label: "error", text: msg }], + }; + } + + if ( + type === "item.started" || + type === "item.updated" || + type === "item.completed" + ) { + const item = rec.item as Record | undefined; + if (!item) return empty; + return classifyItem(item, type); + } + + if (type === "error") { + const message = + typeof rec.message === "string" + ? (rec.message as string) + : JSON.stringify(rec).slice(0, 400); + return { + ...empty, + errorText: message, + events: [{ kind: "error", label: "error", text: message }], + }; + } + + return empty; +} + +function classifyItem( + item: Record, + eventType: string, +): ClassifiedCodexLine { + const empty: ClassifiedCodexLine = { + threadId: null, + resultText: null, + numTurns: null, + errorText: null, + events: [], + }; + const itemType = typeof item.type === "string" ? (item.type as string) : ""; + + if (itemType === "agent_message") { + const text = typeof item.text === "string" ? (item.text as string) : null; + if (!text) return empty; + return { + ...empty, + resultText: eventType === "item.completed" ? text : null, + events: [ + { + kind: + eventType === "item.completed" + ? 
"assistant_text" + : "assistant_text", + label: "Codex", + text, + }, + ], + }; + } + + if ( + itemType === "command_execution" || + itemType === "file_edit" || + itemType === "code_edit" + ) { + const command = + typeof item.command === "string" + ? (item.command as string) + : typeof item.path === "string" + ? (item.path as string) + : itemType; + const label = + itemType === "command_execution" + ? "Bash" + : itemType === "file_edit" + ? "Edit" + : "tool"; + return { + ...empty, + events: [ + { + kind: "tool_use", + label, + text: truncate(command, 300), + }, + ], + }; + } + + if (itemType === "tool_result" || itemType === "command_output") { + const text = + typeof item.text === "string" + ? (item.text as string) + : typeof item.output === "string" + ? (item.output as string) + : null; + if (!text) return empty; + return { + ...empty, + events: [ + { + kind: "tool_result", + label: "tool result", + text: truncate(text, 400), + }, + ], + }; + } + + return empty; +} + +// ---- Helpers ---- + +function killProcessTree(pid: number, signal: NodeJS.Signals): void { + if (process.platform === "win32") { + try { + const killer = spawn("taskkill", ["/pid", String(pid), "/T", "/F"], { + stdio: "ignore", + detached: true, + }); + killer.on("error", () => { + /* best-effort */ + }); + killer.unref(); + } catch { + // best-effort + } + return; + } + try { + process.kill(-pid, signal); + } catch { + try { + process.kill(pid, signal); + } catch { + // ignore + } + } +} + +function truncate(text: string, cap: number): string { + if (text.length <= cap) return text; + return `${text.slice(0, cap)}…`; +} + +function appendRawEvent( + sessionId: string, + iteration: number, + kind: TodoStreamEventKind, + label: string, + text: string, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration, + kind, + label, + text, + }, + ]); +} diff --git a/apps/desktop/src/main/todo-daemon/crush-turn-runner.ts 
b/apps/desktop/src/main/todo-daemon/crush-turn-runner.ts new file mode 100644 index 00000000000..14f975ab774 --- /dev/null +++ b/apps/desktop/src/main/todo-daemon/crush-turn-runner.ts @@ -0,0 +1,482 @@ +import { type ChildProcess, spawn } from "node:child_process"; +import { randomUUID } from "node:crypto"; +import * as fs from "node:fs"; +import * as path from "node:path"; +import Database from "better-sqlite3"; +import type { TodoStreamEventKind } from "main/todo-agent/types"; + +/** + * Crush CLI (`crush run`) turn runner. + * + * Runs a single Crush iteration via `crush run --yolo` and monitors + * progress by polling the project-local SQLite database + * (`/.crush/crush.db`). Crush does not emit structured + * streaming events on stdout (unlike Claude Code or Codex), but it + * writes every message — including tool calls, tool results, and + * finish events — into `crush.db` in real time. We poll this DB and + * convert new rows into the same `TodoStreamEvent` shape the + * supervisor engine uses. + * + * Session resume is supported via `crush run --session `. 
+ * + * See: https://github.com/charmbracelet/crush + */ + +export interface CrushTurnParams { + sessionId: string; + iteration: number; + cwd: string; + prompt: string; + resumeSessionId: string | null; + customSystemPrompt: string | null; + crushModel: string | null; + signal: AbortSignal; + onChild: (child: ChildProcess) => void; + emit: (event: CrushStreamEvent) => void; +} + +export interface CrushTurnResult { + result: string | null; + sessionId: string | null; + costUsd: number | null; + numTurns: number | null; + error: string | null; + interrupted: boolean; +} + +export interface CrushStreamEvent { + id: string; + ts: number; + iteration: number; + kind: TodoStreamEventKind; + label: string; + text: string; + toolUseId?: string; +} + +const CRUSH_BIN = + process.env.TODO_CRUSH_BIN || process.env.CRUSH_BIN || "crush"; + +const POLL_INTERVAL_MS = 250; + +// ---- Main entry point ---- + +export async function runCrushTurn( + params: CrushTurnParams, +): Promise { + const args = buildArgs(params); + + let child: ChildProcess; + try { + child = spawn(CRUSH_BIN, args, { + cwd: params.cwd, + env: { ...process.env }, + detached: process.platform !== "win32", + }); + } catch (error) { + return { + result: null, + sessionId: null, + costUsd: null, + numTurns: null, + error: + error instanceof Error + ? `crush を起動できませんでした: ${error.message}` + : "crush を起動できませんでした", + interrupted: false, + }; + } + + params.onChild(child); + + let crushSessionId: string | null = null; + let resultText: string | null = null; + let costUsd: number | null = null; + let numTurns = 0; + let errorText: string | null = null; + let interrupted = false; + + // Extract session id from stderr: + // "INFO Created session for non-interactive run session_id=..." 
+ let stderrBuffer = ""; + child.stderr?.on("data", (chunk: Buffer) => { + stderrBuffer += chunk.toString("utf8"); + const match = stderrBuffer.match( + /session_id=([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/, + ); + if (match && !crushSessionId) { + crushSessionId = match[1]; + params.emit({ + id: randomUUID(), + ts: Date.now(), + iteration: params.iteration, + kind: "system_init", + label: "Crush", + text: `Crush セッション開始 (id: ${crushSessionId.slice(0, 8)}...)`, + }); + } + }); + + // Collect stdout (final result text) + let stdoutBuffer = ""; + child.stdout?.on("data", (chunk: Buffer) => { + stdoutBuffer += chunk.toString("utf8"); + }); + + // Signal that the child has exited so the DB poll loop can stop. + let childExited = false; + + // Start DB polling in parallel with process execution + const pollPromise = pollDbForEvents( + params, + () => crushSessionId, + () => childExited, + ); + + // Wait for child to exit + const exitCode = await new Promise((resolve) => { + child.on("close", (code) => { + childExited = true; + resolve(code); + }); + child.on("error", (err) => { + errorText = `crush プロセスエラー: ${err.message}`; + childExited = true; + resolve(null); + }); + + if (params.signal.aborted) { + killProcess(child); + interrupted = true; + childExited = true; + resolve(null); + return; + } + params.signal.addEventListener( + "abort", + () => { + interrupted = true; + killProcess(child); + }, + { once: true }, + ); + }); + + // Give DB polling one final sweep after process exits + const pollResult = await pollPromise; + + // Collect final result + resultText = stdoutBuffer.trim() || pollResult.lastAssistantText || null; + numTurns = pollResult.numTurns; + + // Read cost from DB if available + if (crushSessionId) { + const dbPath = findCrushDb(params.cwd); + if (dbPath) { + try { + const db = new Database(dbPath, { readonly: true }); + const row = db + .prepare("SELECT cost FROM sessions WHERE id = ?") + .get(crushSessionId) as { cost: number } 
| undefined; + if (row && typeof row.cost === "number" && Number.isFinite(row.cost)) { + costUsd = row.cost; + } + db.close(); + } catch { + // best-effort + } + } + } + + if (exitCode !== 0 && exitCode !== null && !interrupted) { + errorText = errorText ?? `crush が終了コード ${exitCode} で終了しました`; + } + + return { + result: resultText, + sessionId: crushSessionId, + costUsd, + numTurns: numTurns || null, + error: errorText, + interrupted, + }; +} + +// ---- Arg builder ---- + +function buildArgs(params: CrushTurnParams): string[] { + const args = ["run", "--yolo"]; + + if (params.crushModel) { + args.push("--model", params.crushModel); + } + + if (params.resumeSessionId) { + args.push("--session", params.resumeSessionId); + } + + // Prepend system instructions since Crush `run` has no dedicated flag. + const promptParts: string[] = []; + if (params.customSystemPrompt) { + promptParts.push( + `[System Instructions]\n${params.customSystemPrompt}\n[End System Instructions]\n\n`, + ); + } + promptParts.push(params.prompt); + args.push(promptParts.join("")); + + return args; +} + +// ---- DB polling ---- + +interface PollResult { + lastAssistantText: string | null; + numTurns: number; +} + +async function pollDbForEvents( + params: CrushTurnParams, + getSessionId: () => string | null, + isChildExited: () => boolean, +): Promise { + let lastSeenCreatedAt = 0; + let lastAssistantText: string | null = null; + let numTurns = 0; + let settled = false; + + while (!settled && !params.signal.aborted) { + await sleep(POLL_INTERVAL_MS); + + const sessionId = getSessionId(); + if (!sessionId) continue; + + const dbPath = findCrushDb(params.cwd); + if (!dbPath) continue; + + let db: Database.Database | null = null; + try { + db = new Database(dbPath, { readonly: true }); + const rows = db + .prepare( + "SELECT id, role, parts, created_at FROM messages WHERE session_id = ? AND created_at > ? 
ORDER BY created_at ASC", + ) + .all(sessionId, lastSeenCreatedAt) as Array<{ + id: string; + role: string; + parts: string; + created_at: number; + }>; + + for (const row of rows) { + lastSeenCreatedAt = Math.max(lastSeenCreatedAt, row.created_at); + const parts = safeParseJson(row.parts); + if (!parts) continue; + + for (const part of parts) { + const events = classifyPart(part, row.role, params.iteration); + for (const evt of events) { + params.emit(evt); + if (evt.kind === "assistant_text" && evt.text) { + lastAssistantText = evt.text; + } + if (evt.kind === "result") { + numTurns++; + } + } + } + } + + // Check if session has finished (finish reason in last assistant message) + if (rows.length > 0) { + const lastRow = rows[rows.length - 1]; + const lastParts = safeParseJson(lastRow.parts); + if (lastParts) { + for (const p of lastParts) { + if ( + p.type === "finish" && + (p.data?.reason === "end_turn" || p.data?.reason === "stop") + ) { + settled = true; + } + } + } + } + } catch { + // DB may be locked or not yet created — retry + } finally { + db?.close(); + } + + // Also stop polling once the child process has exited. + // This handles cases where the DB never gets a finish event + // (e.g. error, signal, or DB race). + if (isChildExited()) { + settled = true; + } + } + + return { lastAssistantText, numTurns }; +} + +// ---- Event classification ---- + +function classifyPart( + part: PartData, + role: string, + iteration: number, +): CrushStreamEvent[] { + const events: CrushStreamEvent[] = []; + const ts = Date.now(); + + if (part.type === "text" && role === "assistant") { + const text = part.data?.text ?? ""; + if (text.length > 0) { + events.push({ + id: randomUUID(), + ts, + iteration, + kind: "assistant_text", + label: "Crush", + text: truncate(text, 4000), + }); + } + } else if (part.type === "tool_call") { + const name = part.data?.name ?? "unknown"; + const input = part.data?.input ?? 
""; + const id = part.data?.id; + events.push({ + id: randomUUID(), + ts, + iteration, + kind: "tool_use", + label: toolLabel(name), + text: truncate( + typeof input === "string" ? input : JSON.stringify(input), + 2000, + ), + toolUseId: id, + }); + } else if (part.type === "tool_result") { + const content = part.data?.content ?? ""; + const isError = part.data?.is_error ?? false; + const toolCallId = part.data?.tool_call_id; + events.push({ + id: randomUUID(), + ts, + iteration, + kind: isError ? "error" : "tool_result", + label: isError ? "Error" : "Result", + text: truncate( + typeof content === "string" ? content : JSON.stringify(content), + 4000, + ), + toolUseId: toolCallId, + }); + } else if (part.type === "finish" && role === "assistant") { + const reason = part.data?.reason ?? "unknown"; + if ( + reason === "error" || + reason === "canceled" || + reason === "permission_denied" + ) { + events.push({ + id: randomUUID(), + ts, + iteration, + kind: "error", + label: "Crush", + text: `終了理由: ${reason}`, + }); + } else if (reason === "end_turn" || reason === "stop") { + events.push({ + id: randomUUID(), + ts, + iteration, + kind: "result", + label: "Crush", + text: `ターン完了`, + }); + } + } + // "reasoning" and "binary" parts are intentionally skipped + + return events; +} + +function toolLabel(name: string): string { + const labels: Record = { + bash: "Bash", + edit: "Edit", + write: "Write", + view: "Read", + glob: "Glob", + grep: "Grep", + ls: "LS", + agent: "Agent", + fetch: "Fetch", + sourcegraph: "Sourcegraph", + multiedit: "MultiEdit", + todos: "Todos", + }; + return labels[name] ?? 
name; +} + +// ---- Helpers ---- + +function killProcess(child: ChildProcess) { + if (!child.pid) return; + try { + if (process.platform === "win32") { + spawn("taskkill", ["/pid", String(child.pid), "/T", "/F"]); + } else { + process.kill(-child.pid, "SIGKILL"); + } + } catch { + child.kill("SIGKILL"); + } +} + +function findCrushDb(cwd: string): string | null { + const dbPath = path.join(cwd, ".crush", "crush.db"); + try { + fs.accessSync(dbPath); + return dbPath; + } catch { + return null; + } +} + +function sleep(ms: number): Promise { + return new Promise((resolve) => setTimeout(resolve, ms)); +} + +function safeParseJson(text: string): PartData[] | null { + try { + const parsed = JSON.parse(text); + return Array.isArray(parsed) ? (parsed as PartData[]) : null; + } catch { + return null; + } +} + +function truncate(text: string, maxLen: number): string { + if (text.length <= maxLen) return text; + return `${text.slice(0, maxLen)}…`; +} + +interface PartData { + type: string; + data?: { + text?: string; + thinking?: string; + id?: string; + name?: string; + input?: string; + tool_call_id?: string; + content?: string; + is_error?: boolean; + reason?: string; + [key: string]: unknown; + }; +} diff --git a/apps/desktop/src/main/todo-daemon/index.ts b/apps/desktop/src/main/todo-daemon/index.ts new file mode 100644 index 00000000000..8571bf4f0ce --- /dev/null +++ b/apps/desktop/src/main/todo-daemon/index.ts @@ -0,0 +1,593 @@ +/** + * TODO Agent Daemon + * + * Standalone background process that owns `claude -p` child processes + * for the autonomous TODO agent. Survives app restarts so users don't + * lose in-flight sessions when they close the desktop app. 
+ * + * Run with: ELECTRON_RUN_AS_NODE=1 electron dist/main/todo-daemon.js + * + * IPC: + * - NDJSON over Unix domain socket at ~/.superset/todo-daemon.sock + * - Auth token at ~/.superset/todo-daemon.token + * + * Issue: https://github.com/MocA-Love/superset/issues/237 + */ + +import { randomBytes } from "node:crypto"; +import { + chmodSync, + existsSync, + mkdirSync, + readFileSync, + unlinkSync, + writeFileSync, +} from "node:fs"; +import { createServer, type Server, Socket } from "node:net"; +import { homedir } from "node:os"; +import { join } from "node:path"; +import { SUPERSET_DIR_NAME } from "shared/constants"; +import { + type AbortRequest, + type HelloRequest, + type HelloResponse, + type IpcErrorResponse, + type IpcEvent, + type IpcRequest, + type IpcSuccessResponse, + type ListActiveResponse, + type QueueInterventionRequest, + type ResumeWaitingRequest, + type SessionStateEventPayload, + type SessionStreamEventPayload, + type ShutdownRequest, + type StartRequest, + TODO_DAEMON_PROTOCOL_VERSION, +} from "../lib/todo-daemon/types"; +import { getTodoSessionStore } from "../todo-agent/session-store"; +import { invalidateTodoSettingsCache } from "../todo-agent/settings"; +import type { + TodoSessionStateEvent, + TodoStreamUpdate, +} from "../todo-agent/types"; +import { TodoSupervisorEngine } from "./supervisor-engine"; + +const DAEMON_VERSION = "1.0.0"; +const SUPERSET_HOME_DIR = join(homedir(), SUPERSET_DIR_NAME); +const SOCKET_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.sock"); +const TOKEN_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.token"); +const PID_PATH = join(SUPERSET_HOME_DIR, "todo-daemon.pid"); + +// ============================================================================ +// Logging +// ============================================================================ + +function log( + level: "info" | "warn" | "error", + message: string, + data?: unknown, +): void { + const timestamp = new Date().toISOString(); + const prefix = `[${timestamp}] 
[todo-daemon] [${level.toUpperCase()}]`; + if (data !== undefined) { + console.log(`${prefix} ${message}`, data); + } else { + console.log(`${prefix} ${message}`); + } +} + +// ============================================================================ +// Auth +// ============================================================================ + +let authToken: string; + +function ensureAuthToken(): string { + if (existsSync(TOKEN_PATH)) { + return readFileSync(TOKEN_PATH, "utf-8").trim(); + } + const token = randomBytes(32).toString("hex"); + writeFileSync(TOKEN_PATH, token, { mode: 0o600 }); + log("info", "Generated new auth token"); + return token; +} + +function validateToken(token: string): boolean { + return token === authToken; +} + +// ============================================================================ +// NDJSON +// ============================================================================ + +class NdjsonParser { + private buffer = ""; + + parse(chunk: string): IpcRequest[] { + this.buffer += chunk; + const messages: IpcRequest[] = []; + let newlineIndex = this.buffer.indexOf("\n"); + while (newlineIndex !== -1) { + const line = this.buffer.slice(0, newlineIndex); + this.buffer = this.buffer.slice(newlineIndex + 1); + if (line.trim()) { + try { + messages.push(JSON.parse(line)); + } catch { + log("warn", "Failed to parse NDJSON line"); + } + } + newlineIndex = this.buffer.indexOf("\n"); + } + return messages; + } +} + +function sendResponse( + socket: Socket, + response: IpcSuccessResponse | IpcErrorResponse, +): void { + try { + socket.write(`${JSON.stringify(response)}\n`); + } catch (error) { + log("warn", "Failed to write response", error); + } +} + +function sendSuccess(socket: Socket, id: string, payload: unknown): void { + sendResponse(socket, { id, ok: true, payload }); +} + +function sendError( + socket: Socket, + id: string, + code: string, + message: string, +): void { + sendResponse(socket, { id, ok: false, error: { code, message } }); 
+}
+
+// ============================================================================
+// Event broadcasting
+// ============================================================================
+
+interface ClientState {
+  authenticated: boolean;
+}
+
+/**
+ * Sockets eligible for event broadcasts. A socket is inserted into this
+ * set ONLY after it successfully authenticates via `hello` — otherwise
+ * any local process that can open the socket path could read session
+ * prompts and output without presenting the auth token.
+ */
+const authenticatedClients = new Set<Socket>();
+
+function broadcastEvent(event: IpcEvent): void {
+  const msg = `${JSON.stringify(event)}\n`;
+  for (const socket of authenticatedClients) {
+    try {
+      socket.write(msg);
+    } catch {
+      // best-effort
+    }
+  }
+}
+
+function broadcastSessionState(event: TodoSessionStateEvent): void {
+  const payload: SessionStateEventPayload = { session: event.session };
+  broadcastEvent({
+    type: "event",
+    event: "sessionState",
+    payload,
+  });
+}
+
+function broadcastStreamUpdate(update: TodoStreamUpdate): void {
+  const payload: SessionStreamEventPayload = {
+    sessionId: update.sessionId,
+    events: update.events,
+  };
+  broadcastEvent({
+    type: "event",
+    event: "streamEvents",
+    payload,
+  });
+}
+
+// ============================================================================
+// Engine + store wiring
+// ============================================================================
+
+let engine: TodoSupervisorEngine;
+
+/**
+ * Subscribed session IDs. The session-store uses a per-session
+ * EventEmitter topic, so the bridge has to attach a listener for each
+ * session it wants to forward. `listenSession` is idempotent.
+ */
+const subscribedSessionIds = new Set<string>();
+
+function listenSession(sessionId: string): void {
+  if (subscribedSessionIds.has(sessionId)) return;
+  subscribedSessionIds.add(sessionId);
+  const store = getTodoSessionStore();
+  store.subscribe(sessionId, (event) => broadcastSessionState(event));
+  store.subscribeStream(sessionId, (update) => broadcastStreamUpdate(update));
+}
+
+function wireStoreBridge(): void {
+  // Attach listeners to every session currently in the DB so rehydrate
+  // and daemon-restart emits reach whatever client is connected. The
+  // `listAll` snapshot is one SQL query — cheap even for power users.
+  for (const row of getTodoSessionStore().listAll()) {
+    listenSession(row.id);
+  }
+}
+
+// ============================================================================
+// Request handlers
+// ============================================================================
+
+type Handler = (
+  socket: Socket,
+  id: string,
+  payload: unknown,
+  clientState: ClientState,
+) => void | Promise<void>;
+
+const handlers: Record<string, Handler> = {
+  hello: (socket, id, payload, clientState) => {
+    const request = payload as HelloRequest;
+    if (request.protocolVersion !== TODO_DAEMON_PROTOCOL_VERSION) {
+      sendError(
+        socket,
+        id,
+        "PROTOCOL_MISMATCH",
+        `Protocol version mismatch. Expected ${TODO_DAEMON_PROTOCOL_VERSION}, got ${request.protocolVersion}`,
+      );
+      return;
+    }
+    if (!validateToken(request.token)) {
+      sendError(socket, id, "AUTH_FAILED", "Invalid auth token");
+      return;
+    }
+    clientState.authenticated = true;
+    // Only now is this socket eligible to receive broadcast events.
+    // Adding it earlier would leak session data to any process that
+    // can open the socket path without presenting the token.
+ authenticatedClients.add(socket); + const response: HelloResponse = { + protocolVersion: TODO_DAEMON_PROTOCOL_VERSION, + daemonVersion: DAEMON_VERSION, + daemonPid: process.pid, + activeSessionIds: engine.listActiveSessionIds(), + }; + sendSuccess(socket, id, response); + }, + start: async (socket, id, payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const request = payload as StartRequest; + try { + listenSession(request.sessionId); + await engine.start(request.sessionId, { + fromScheduledWakeup: request.fromScheduledWakeup, + }); + sendSuccess(socket, id, { success: true }); + } catch (error) { + const msg = error instanceof Error ? error.message : String(error); + sendError(socket, id, "START_FAILED", msg); + } + }, + abort: (socket, id, payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const request = payload as AbortRequest; + try { + listenSession(request.sessionId); + engine.abort(request.sessionId); + sendSuccess(socket, id, { success: true }); + } catch (error) { + const msg = error instanceof Error ? error.message : String(error); + sendError(socket, id, "ABORT_FAILED", msg); + } + }, + queueIntervention: (socket, id, payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const request = payload as QueueInterventionRequest; + try { + listenSession(request.sessionId); + engine.queueIntervention(request.sessionId, request.data); + sendSuccess(socket, id, { success: true }); + } catch (error) { + const msg = error instanceof Error ? 
error.message : String(error); + sendError(socket, id, "INTERVENTION_FAILED", msg); + } + }, + resumeWaiting: async (socket, id, payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const request = payload as ResumeWaitingRequest; + try { + for (const sid of request.sessionIds) { + listenSession(sid); + await engine.start(sid); + } + sendSuccess(socket, id, { success: true }); + } catch (error) { + const msg = error instanceof Error ? error.message : String(error); + sendError(socket, id, "RESUME_FAILED", msg); + } + }, + settingsChanged: (socket, id, _payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + invalidateTodoSettingsCache(); + engine.handleSettingsChanged(); + sendSuccess(socket, id, { success: true }); + }, + rehydrate: (socket, id, _payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const store = getTodoSessionStore(); + const rows = store.listAll(); + for (const row of rows) { + listenSession(row.id); + } + const n = store.rehydrateStrandedSessionsExcept( + engine.listActiveSessionIds(), + ); + log("info", `Rehydrated ${n} stranded session(s) on client request`); + sendSuccess(socket, id, { success: true }); + }, + listActive: (socket, id, _payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const response: ListActiveResponse = { + sessionIds: engine.listActiveSessionIds(), + }; + sendSuccess(socket, id, response); + }, + shutdown: (socket, id, payload, clientState) => { + if (!clientState.authenticated) { + sendError(socket, id, "NOT_AUTHENTICATED", "Authenticate first"); + return; + } + const request = payload as ShutdownRequest; + log("info", "Shutdown requested", request); + 
sendSuccess(socket, id, { success: true }); + setTimeout(() => { + engine.shutdownAll({ killChildren: !!request.killSessions }); + void stopServer().then(() => process.exit(0)); + }, 100); + }, +}; + +async function handleRequest( + socket: Socket, + request: IpcRequest, + clientState: ClientState, +): Promise { + const handler = handlers[request.type]; + if (!handler) { + sendError( + socket, + request.id, + "UNKNOWN_REQUEST", + `Unknown request type: ${request.type}`, + ); + return; + } + try { + await handler(socket, request.id, request.payload, clientState); + } catch (error) { + const msg = error instanceof Error ? error.message : String(error); + sendError(socket, request.id, "INTERNAL_ERROR", msg); + log("error", `Handler error for ${request.type}`, msg); + } +} + +// ============================================================================ +// Server +// ============================================================================ + +let server: Server | null = null; + +function handleConnection(socket: Socket): void { + const parser = new NdjsonParser(); + const clientState: ClientState = { authenticated: false }; + socket.setEncoding("utf-8"); + + socket.on("data", (data: string) => { + const messages = parser.parse(data); + for (const message of messages) { + handleRequest(socket, message, clientState).catch((error) => { + log( + "error", + "Unhandled request error", + error instanceof Error ? 
error.message : String(error), + ); + }); + } + }); + + socket.on("close", () => { + authenticatedClients.delete(socket); + }); + + socket.on("error", (error) => { + log("warn", `Socket error`, error.message); + authenticatedClients.delete(socket); + }); +} + +function isSocketLive(): Promise { + return new Promise((resolve) => { + if (!existsSync(SOCKET_PATH)) { + resolve(false); + return; + } + const probe = new Socket(); + const timeout = setTimeout(() => { + probe.destroy(); + resolve(false); + }, 1_000); + probe.on("connect", () => { + clearTimeout(timeout); + probe.destroy(); + resolve(true); + }); + probe.on("error", () => { + clearTimeout(timeout); + resolve(false); + }); + probe.connect(SOCKET_PATH); + }); +} + +async function startServer(): Promise { + if (!existsSync(SUPERSET_HOME_DIR)) { + mkdirSync(SUPERSET_HOME_DIR, { recursive: true, mode: 0o700 }); + } + try { + chmodSync(SUPERSET_HOME_DIR, 0o700); + } catch { + // may fail if not owner + } + + if (existsSync(SOCKET_PATH)) { + const live = await isSocketLive(); + if (live) { + log("error", "Another daemon is already running"); + throw new Error("Another daemon is already running"); + } + try { + unlinkSync(SOCKET_PATH); + } catch (error) { + throw new Error(`Failed to remove stale socket: ${error}`); + } + } + if (existsSync(PID_PATH)) { + try { + unlinkSync(PID_PATH); + } catch { + // ignore + } + } + + authToken = ensureAuthToken(); + engine = new TodoSupervisorEngine(); + wireStoreBridge(); + + // Mark any sessions the previous daemon left mid-run as failed. 
+  getTodoSessionStore().rehydrateStrandedSessionsExcept(
+    engine.listActiveSessionIds(),
+  );
+
+  const newServer = createServer(handleConnection);
+  server = newServer;
+  await new Promise<void>((resolve, reject) => {
+    newServer.on("error", (error: NodeJS.ErrnoException) => {
+      if (error.code === "EADDRINUSE") {
+        log("error", "Socket already in use");
+        reject(new Error("Socket already in use"));
+      } else {
+        log("error", "Server error", error.message);
+        reject(error);
+      }
+    });
+    newServer.listen(SOCKET_PATH, () => {
+      try {
+        chmodSync(SOCKET_PATH, 0o600);
+      } catch {
+        // ignore
+      }
+      writeFileSync(PID_PATH, String(process.pid), { mode: 0o600 });
+      log("info", `Daemon started on ${SOCKET_PATH}, PID=${process.pid}`);
+      resolve();
+    });
+  });
+}
+
+async function stopServer(): Promise<void> {
+  for (const socket of authenticatedClients) {
+    try {
+      socket.destroy();
+    } catch {
+      // ignore
+    }
+  }
+  authenticatedClients.clear();
+  await new Promise<void>((resolve) => {
+    if (server) {
+      server.close(() => resolve());
+    } else {
+      resolve();
+    }
+  });
+  try {
+    if (existsSync(SOCKET_PATH)) unlinkSync(SOCKET_PATH);
+    if (existsSync(PID_PATH)) unlinkSync(PID_PATH);
+  } catch {
+    // best-effort
+  }
+}
+
+// ============================================================================
+// Signal handling
+// ============================================================================
+
+function setupSignalHandlers(): void {
+  const onSignal = (sig: string) => {
+    log("info", `Received ${sig}, shutting down`);
+    if (engine) engine.shutdownAll({ killChildren: true });
+    void stopServer().then(() => process.exit(0));
+  };
+  process.on("SIGTERM", () => onSignal("SIGTERM"));
+  process.on("SIGINT", () => onSignal("SIGINT"));
+  process.on("SIGHUP", () => onSignal("SIGHUP"));
+  process.on("uncaughtException", (error) => {
+    log("error", "Uncaught exception", error);
+  });
+  process.on("unhandledRejection", (reason) => {
+    log("error", "Unhandled rejection", reason);
+  });
+}
+
+// ============================================================================
+// Main
+// ============================================================================
+
+async function main(): Promise<void> {
+  log("info", "TODO Agent daemon starting…");
+  log("info", `Environment: ${process.env.NODE_ENV || "production"}`);
+  setupSignalHandlers();
+  try {
+    await startServer();
+  } catch (error) {
+    log(
+      "error",
+      "Failed to start",
+      error instanceof Error ? error.message : String(error),
+    );
+    process.exit(1);
+  }
+}
+
+void main();
diff --git a/apps/desktop/src/main/todo-daemon/pty-turn-runner.ts b/apps/desktop/src/main/todo-daemon/pty-turn-runner.ts
new file mode 100644
index 00000000000..8eeeaadbe57
--- /dev/null
+++ b/apps/desktop/src/main/todo-daemon/pty-turn-runner.ts
@@ -0,0 +1,1136 @@
+import { randomUUID } from "node:crypto";
+import * as fs from "node:fs";
+import * as os from "node:os";
+import * as path from "node:path";
+import { getTodoSessionStore } from "main/todo-agent/session-store";
+import {
+  CLAUDE_EFFORT_OPTIONS,
+  CLAUDE_MODEL_OPTIONS,
+  type TodoStreamEvent,
+  type TodoStreamEventKind,
+} from "main/todo-agent/types";
+import type { IPty } from "node-pty";
+import * as pty from "node-pty";
+
+/**
+ * PTY-mode Claude Code turn runner.
+ *
+ * Runs a single Claude Code iteration as an interactive TUI behind a
+ * PTY (instead of the default `claude -p` headless stream-json path).
+ * Structured events are pulled from the session JSONL transcript at
+ * `~/.claude/projects/<encoded-cwd>/<session-id>.jsonl`; turn-end is
+ * detected via a Stop hook injected through `--settings`. See
+ * `apps/desktop/plans/20260417-todo-agent-remote-control.md`.
+ *
+ * Enabled only when the daemon process starts with `TODO_ENGINE=pty`
+ * **and** the session row has `remote_control_enabled` set. The
+ * supervisor engine routes to this runner instead of the headless
+ * implementation in `supervisor-engine.ts` under that condition.
+ */ + +// Public shape must match `runClaudeTurn` in supervisor-engine.ts so +// callers can swap implementations transparently. +export interface PtyTurnParams { + sessionId: string; + iteration: number; + cwd: string; + prompt: string; + resumeSessionId: string | null; + customSystemPrompt: string | null; + claudeModel: string | null; + claudeEffort: string | null; + signal: AbortSignal; + onChild: (handle: { + pid: number | null; + kill: () => void; + /** + * Register a callback invoked when the spawned PTY process + * exits. The supervisor uses this to clear its SIGKILL + * fallback timer so it does not fire against a terminated or + * recycled PID (CodeRabbit review). + */ + onExit: (cb: () => void) => void; + }) => void; + /** Whether to send `/remote-control` after the PTY is ready. */ + remoteControlEnabled: boolean; +} + +export interface PtyTurnResult { + result: string | null; + sessionId: string | null; + costUsd: number | null; + numTurns: number | null; + error: string | null; + interrupted: boolean; + scheduledWakeup: { delayMs: number; reason: string | null } | null; +} + +// ============================================================================= +// Constants +// ============================================================================= + +/** Path of the POSIX executable whose session JSONL we tail. Falls + * back to `claude` on PATH when unset (tests / dev shells). */ +const CLAUDE_BIN = + process.env.TODO_CLAUDE_BIN || process.env.CLAUDE_BIN || "claude"; + +/** Project transcript root used by Claude Code. */ +const CLAUDE_PROJECTS_ROOT = path.join(os.homedir(), ".claude", "projects"); + +/** How long we wait after spawn for the JSONL file to appear. */ +const JSONL_DISCOVERY_TIMEOUT_MS = 30_000; + +/** How often we poll the JSONL file for appended lines. */ +const JSONL_POLL_INTERVAL_MS = 250; + +/** Max wait for TUI to settle before we send the first prompt. 
*/ +const TUI_READY_MAX_WAIT_MS = 25_000; + +/** Idle window after which we consider the TUI ready (no stdout). */ +const TUI_READY_IDLE_MS = 2_000; + +/** Max wait for the Stop hook to fire after a prompt is sent. */ +const STOP_HOOK_MAX_WAIT_MS = 30 * 60 * 1000; // 30 min + +/** Max wait after sending `/remote-control` for a session URL to + * appear in PTY stdout. */ +const REMOTE_CONTROL_URL_TIMEOUT_MS = 15_000; + +const REMOTE_CONTROL_URL_RE = + /https:\/\/claude\.ai\/code\/session_[A-Za-z0-9_-]+/; + +const ATTACHMENT_PATH_RE = + /!\[[^\]]*\]\(([^()\s]*[/\\]todo-agent[/\\]attachments[/\\][^)\s]+)\)/g; + +// ============================================================================= +// Public entry point +// ============================================================================= + +export async function runClaudeTurnPty( + params: PtyTurnParams, +): Promise { + const encodedCwd = encodeCwdForClaude(params.cwd); + const projectDir = path.join(CLAUDE_PROJECTS_ROOT, encodedCwd); + ensureDir(projectDir); + + // Set up the Stop-hook sink before spawning so we never miss events. 
+ const hookSink = createHookSink(params.sessionId); + const settings = buildSettingsJson(hookSink.hookCommand); + + const args = [ + "--permission-mode", + "bypassPermissions", + "--settings", + settings, + ]; + if (params.customSystemPrompt) { + args.push("--append-system-prompt", params.customSystemPrompt); + } + if ( + params.claudeModel && + (CLAUDE_MODEL_OPTIONS as readonly string[]).includes(params.claudeModel) + ) { + args.push("--model", params.claudeModel); + } else if (params.claudeModel) { + console.warn( + "[todo-daemon:pty] ignoring unknown claudeModel:", + params.claudeModel, + ); + } + if ( + params.claudeEffort && + (CLAUDE_EFFORT_OPTIONS as readonly string[]).includes(params.claudeEffort) + ) { + args.push("--effort", params.claudeEffort); + } else if (params.claudeEffort) { + console.warn( + "[todo-daemon:pty] ignoring unknown claudeEffort:", + params.claudeEffort, + ); + } + if (params.resumeSessionId) { + args.push("--resume", params.resumeSessionId); + } + + let ptyProcess: IPty; + try { + ptyProcess = pty.spawn(CLAUDE_BIN, args, { + name: "xterm-256color", + cols: 120, + rows: 40, + cwd: params.cwd, + env: { + ...process.env, + TERM: "xterm-256color", + }, + }); + } catch (error) { + hookSink.cleanup(); + return { + result: null, + sessionId: null, + costUsd: null, + numTurns: null, + error: + error instanceof Error + ? 
`claude を PTY 起動できませんでした: ${error.message}` + : "claude を PTY 起動できませんでした", + interrupted: false, + scheduledWakeup: null, + }; + } + + const state: TurnState = { + claudeSessionId: params.resumeSessionId, + lastAssistantText: null, + costUsd: null, + numTurns: 0, + scheduledWakeup: null, + processedEventCount: 0, + jsonlPath: null, + jsonlReadOffset: 0, + remoteControlUrl: null, + }; + + let ptyBuffer = ""; + // Mutable flags wrapped in an object so TypeScript's control-flow + // analysis doesn't narrow the closure-captured locals to `never` + // when we read them later in the same function (the assignments + // live inside `onExit`/`onData` callbacks and are opaque to the + // analyzer). + const ptyStatus: { + alive: boolean; + exit: { exitCode: number; signal?: number } | null; + } = { alive: true, exit: null }; + // Collect onExit subscribers from the supervisor shim before the + // PTY actually exits so none are dropped even if the callback is + // registered after `onExit` fires (defensive — the supervisor is + // expected to subscribe synchronously inside `params.onChild`). + const exitSubscribers = new Set<() => void>(); + ptyProcess.onData((data) => { + ptyBuffer += data; + // Keep the buffer bounded. We only parse the last page when we + // need it (ready detection, `/remote-control` URL capture). + if (ptyBuffer.length > 64 * 1024) { + ptyBuffer = ptyBuffer.slice(-32 * 1024); + } + }); + ptyProcess.onExit((ev) => { + ptyStatus.alive = false; + ptyStatus.exit = ev; + for (const cb of Array.from(exitSubscribers)) { + exitSubscribers.delete(cb); + try { + cb(); + } catch { + /* ignore */ + } + } + }); + + params.onChild({ + pid: ptyProcess.pid ?? 
null, + kill: () => safeKill(ptyProcess), + onExit: (cb) => { + if (!ptyStatus.alive) { + try { + cb(); + } catch { + /* ignore */ + } + return; + } + exitSubscribers.add(cb); + }, + }); + + const abortHandler = () => { + safeKill(ptyProcess); + }; + params.signal.addEventListener("abort", abortHandler); + + // Poll state: abort / intervention / jsonl tail / hook sink. + let interrupted = false; + const interventionStore = getTodoSessionStore(); + const pollState = () => { + if (!ptyStatus.alive) return false; + if (params.signal.aborted) { + safeKill(ptyProcess); + return false; + } + const live = interventionStore.get(params.sessionId); + if (live?.pendingIntervention?.trim()) { + interrupted = true; + appendRawEvent( + params.sessionId, + params.iteration, + "system_init", + "介入", + "ユーザ介入を検知。現在のターンを中断して介入内容で再開します…", + ); + // Forcibly end the current turn. We do not send SIGINT + // through the PTY because the TUI treats ctrl-c as + // "cancel prompt"; just kill the process — the next + // iteration will re-spawn with the intervention prepended. + safeKill(ptyProcess); + return false; + } + return true; + }; + + try { + // Wait for the JSONL file to appear. For non-resume runs we + // wait until the SessionStart hook has written the runtime + // session id to disk — that is the only way to bind *this* + // PTY to *this* JSONL when multiple sessions spawn in the + // same cwd. Falling back to "first new jsonl" would make + // concurrent sessions tail each other's transcripts + // (P1 review finding). 
+ const jsonlStartTs = Date.now(); + while (Date.now() - jsonlStartTs < JSONL_DISCOVERY_TIMEOUT_MS) { + if (!pollState()) break; + const nowJsonls = listJsonl(projectDir); + let discovered: string | null = null; + if (params.resumeSessionId) { + const expected = `${params.resumeSessionId}.jsonl`; + if (nowJsonls.includes(expected)) { + discovered = expected; + } + } else { + const runtimeSessionId = hookSink.readRuntimeSessionId(); + if (runtimeSessionId) { + const expected = `${runtimeSessionId}.jsonl`; + if (nowJsonls.includes(expected)) { + discovered = expected; + state.claudeSessionId = runtimeSessionId; + } + } + } + if (discovered) { + state.jsonlPath = path.join(projectDir, discovered); + // When resuming, skip past the existing content so we + // only see events produced by this turn. + if (params.resumeSessionId) { + try { + state.jsonlReadOffset = fs.statSync(state.jsonlPath).size; + } catch { + state.jsonlReadOffset = 0; + } + } + if (!state.claudeSessionId) { + const base = path.basename(discovered, ".jsonl"); + if (/^[0-9a-f-]{36}$/.test(base)) { + state.claudeSessionId = base; + } + } + break; + } + await sleep(200); + } + + if (!state.jsonlPath) { + const runtimeSid = hookSink.readRuntimeSessionId(); + const foundJsonls = listJsonl(projectDir); + const foundStr = + foundJsonls.length > 0 + ? `projectDir に存在するファイル: ${foundJsonls.slice(0, 5).join(", ")}${foundJsonls.length > 5 ? ` 他${foundJsonls.length - 5}件` : ""}` + : "projectDir に .jsonl ファイルが見つかりません"; + return { + result: null, + sessionId: state.claudeSessionId, + costUsd: null, + numTurns: null, + error: runtimeSid + ? `Claude Code のセッション JSONL (${runtimeSid}.jsonl) が発見できませんでした — ${foundStr}` + : `SessionStart hook が発火しなかったため JSONL を同定できませんでした (PTY 起動は成功) — ${foundStr}`, + interrupted: false, + scheduledWakeup: null, + }; + } + + // Wait for the TUI to settle so the first prompt isn't dropped. 
+ await waitForTuiReady( + () => ptyBuffer, + () => ptyStatus.alive, + TUI_READY_MAX_WAIT_MS, + ); + if (!ptyStatus.alive) { + return ptyExitError(state, ptyStatus.exit, ptyBuffer); + } + + // `/remote-control` must be sent BEFORE the first user prompt. + // Otherwise the TUI may be busy rendering the response when we + // issue it, and the slash command gets treated as plain input. + if (params.remoteControlEnabled) { + await activateRemoteControl( + ptyProcess, + () => ptyBuffer, + (url) => { + state.remoteControlUrl = url; + appendRawEvent( + params.sessionId, + params.iteration, + "remote_control", + "Remote Control", + `接続 URL: ${url}`, + ); + }, + (errorText) => { + appendRawEvent( + params.sessionId, + params.iteration, + "remote_control_error", + "Remote Control エラー", + errorText, + ); + }, + ); + // Give the TUI a moment to settle after the slash command. + await sleep(500); + } + + // Send the prompt via bracketed paste to preserve newlines and + // avoid the TUI re-interpreting content like `/` as slash + // commands when it starts a line. + ptyProcess.write(`\x1b[200~${params.prompt}\x1b[201~`); + await sleep(200); + ptyProcess.write("\r"); + + // Tail the JSONL and wait for Stop hook or PTY exit. + const turnStartTs = Date.now(); + while (ptyStatus.alive) { + if (!pollState()) break; + await tailJsonl(state, params); + if (hookSink.hasStopEvent()) break; + if (Date.now() - turnStartTs > STOP_HOOK_MAX_WAIT_MS) { + appendRawEvent( + params.sessionId, + params.iteration, + "error", + "timeout", + "Stop hook が発火しないまま PTY ターンがタイムアウトしました", + ); + break; + } + await sleep(JSONL_POLL_INTERVAL_MS); + } + + // Drain any lines written after the last poll. 
+ await tailJsonl(state, params); + + if (interrupted) { + return { + result: state.lastAssistantText, + sessionId: state.claudeSessionId, + costUsd: state.costUsd, + numTurns: state.numTurns || null, + error: null, + interrupted: true, + scheduledWakeup: state.scheduledWakeup, + }; + } + + if ( + !ptyStatus.alive && + (ptyStatus.exit?.exitCode ?? 0) !== 0 && + !state.lastAssistantText + ) { + return ptyExitError(state, ptyStatus.exit, ptyBuffer); + } + + return { + result: state.lastAssistantText, + sessionId: state.claudeSessionId, + costUsd: state.costUsd, + numTurns: state.numTurns || null, + error: null, + interrupted: false, + scheduledWakeup: state.scheduledWakeup, + }; + } finally { + params.signal.removeEventListener("abort", abortHandler); + // End the interactive session cleanly. The PTY may already have + // exited (Stop hook path often corresponds to a long-lived TUI + // waiting for the next prompt); tell it to exit so the next + // iteration can start fresh with --resume. + if (ptyStatus.alive) { + try { + ptyProcess.write("/exit\r"); + } catch { + /* ignore */ + } + await sleep(300); + if (ptyStatus.alive) safeKill(ptyProcess); + } + hookSink.cleanup(); + } +} + +// ============================================================================= +// Helpers +// ============================================================================= + +interface TurnState { + claudeSessionId: string | null; + lastAssistantText: string | null; + costUsd: number | null; + numTurns: number; + scheduledWakeup: { delayMs: number; reason: string | null } | null; + processedEventCount: number; + jsonlPath: string | null; + jsonlReadOffset: number; + remoteControlUrl: string | null; +} + +function encodeCwdForClaude(cwd: string): string { + // Claude Code replaces every non-alphanumeric character with `-`. 
+ return cwd.replace(/[^a-zA-Z0-9]/g, "-"); +} + +function ensureDir(p: string): void { + try { + fs.mkdirSync(p, { recursive: true }); + } catch { + /* ignore */ + } +} + +function listJsonl(dir: string): string[] { + try { + return fs.readdirSync(dir).filter((f) => f.endsWith(".jsonl")); + } catch { + return []; + } +} + +function sleep(ms: number): Promise { + return new Promise((r) => setTimeout(r, ms)); +} + +function safeKill(p: IPty): void { + try { + p.kill(); + } catch { + /* ignore */ + } +} + +function buildSettingsJson(hookCommand: (event: string) => string): string { + const settings = { + hooks: { + Stop: [ + { + matcher: "", + hooks: [{ type: "command", command: hookCommand("Stop") }], + }, + ], + // SessionStart captures Claude's runtime `session_id` so the + // daemon can unambiguously identify which JSONL file this + // spawn owns, even when multiple PTY sessions start + // concurrently in the same cwd (P1 review finding). + SessionStart: [ + { + matcher: "", + hooks: [{ type: "command", command: hookCommand("SessionStart") }], + }, + ], + }, + }; + return JSON.stringify(settings); +} + +// ----------------------------------------------------------------------------- +// Hook sink — a Node.js helper script the Stop / SessionStart hooks +// invoke. It appends hook payloads to a per-session event log and +// records the runtime `session_id` that SessionStart carries. The +// daemon reads the log to: +// 1. flip `hasStopEvent()` when Claude finishes a turn +// 2. look up the authoritative JSONL filename (`.jsonl`) +// so concurrent PTY sessions in the same cwd never tail each +// other's transcripts (was a P1 review finding). +// +// Using Node (instead of POSIX `sh`) gives us a single script that +// works on Windows as well — Claude Code itself is a Node CLI so +// `node` is always on PATH for any environment where the daemon can +// launch `claude`. 
+// ----------------------------------------------------------------------------- + +interface HookSink { + hookCommand: (event: string) => string; + hasStopEvent(): boolean; + readRuntimeSessionId(): string | null; + cleanup(): void; +} + +function createHookSink(sessionId: string): HookSink { + const tmpDir = path.join(os.tmpdir(), "superset-todo-pty"); + try { + fs.mkdirSync(tmpDir, { recursive: true }); + } catch { + /* ignore */ + } + const stamp = `${sessionId}-${Date.now()}`; + const eventsPath = path.join(tmpDir, `hook-${stamp}.log`); + const sessionIdPath = path.join(tmpDir, `hook-${stamp}.session-id`); + const scriptPath = path.join(tmpDir, `hook-${stamp}.js`); + try { + fs.writeFileSync(eventsPath, ""); + } catch { + /* ignore */ + } + + // Cross-platform Node hook runner. It reads stdin (the hook + // payload JSON), appends an NDJSON line to the events log, and — + // when Claude's runtime session_id is in the payload — records it + // to a sibling file the daemon polls during JSONL discovery. + const script = `#!/usr/bin/env node +const fs = require('fs'); +const EVENT = process.argv[2] || ''; +const EVENTS_LOG = ${JSON.stringify(eventsPath)}; +const SESSION_ID_FILE = ${JSON.stringify(sessionIdPath)}; +let chunks = []; +process.stdin.on('data', (c) => { chunks.push(c); }); +process.stdin.on('end', () => { + const raw = Buffer.concat(chunks).toString('utf8'); + try { + fs.appendFileSync(EVENTS_LOG, JSON.stringify({ event: EVENT, input: raw }) + '\\n'); + } catch (_) { /* ignore */ } + try { + const payload = JSON.parse(raw); + const sid = payload && typeof payload === 'object' ? 
payload.session_id : null; + if (typeof sid === 'string' && sid.length > 0) { + fs.writeFileSync(SESSION_ID_FILE, sid); + } + } catch (_) { /* non-JSON payload, ignore */ } + process.exit(0); +}); +process.stdin.on('error', () => process.exit(0)); +`; + try { + fs.writeFileSync(scriptPath, script, { mode: 0o755 }); + } catch (err) { + console.warn("[todo-daemon:pty] failed to write hook script:", err); + } + + // Quote-safe command string accepted by Claude Code hook runner + // (spawned through a shell on all platforms). The script path may + // contain spaces on macOS ("/Users/x y/..."), so wrap it in + // double-quotes and escape embedded quotes. + const quotedScript = `"${scriptPath.replace(/"/g, '\\"')}"`; + const hookCommand = (event: string) => `node ${quotedScript} ${event}`; + + return { + hookCommand, + hasStopEvent: () => { + try { + return fs.readFileSync(eventsPath, "utf8").includes('"event":"Stop"'); + } catch { + return false; + } + }, + readRuntimeSessionId: () => { + try { + const raw = fs.readFileSync(sessionIdPath, "utf8").trim(); + return raw.length > 0 ? 
raw : null; + } catch { + return null; + } + }, + cleanup: () => { + for (const p of [scriptPath, eventsPath, sessionIdPath]) { + try { + fs.unlinkSync(p); + } catch { + /* ignore */ + } + } + }, + }; +} + +// ----------------------------------------------------------------------------- +// TUI ready detection +// ----------------------------------------------------------------------------- + +async function waitForTuiReady( + getBuffer: () => string, + isAlive: () => boolean, + timeoutMs: number, +): Promise { + const deadline = Date.now() + timeoutMs; + let lastLen = 0; + let stableAt = Date.now(); + while (Date.now() < deadline) { + await sleep(200); + if (!isAlive()) return false; + const buf = getBuffer(); + if (buf.length === lastLen) { + if (Date.now() - stableAt >= TUI_READY_IDLE_MS) return true; + } else { + lastLen = buf.length; + stableAt = Date.now(); + } + } + return false; +} + +// ----------------------------------------------------------------------------- +// /remote-control flow +// ----------------------------------------------------------------------------- + +async function activateRemoteControl( + ptyProc: IPty, + getBuffer: () => string, + onUrl: (url: string) => void, + onError: (msg: string) => void, +): Promise { + const bufferLenBefore = getBuffer().length; + try { + ptyProc.write("/remote-control\r"); + } catch (err) { + onError( + `/remote-control の送信に失敗: ${ + err instanceof Error ? 
err.message : String(err) + }`, + ); + return; + } + const deadline = Date.now() + REMOTE_CONTROL_URL_TIMEOUT_MS; + while (Date.now() < deadline) { + await sleep(250); + const snippet = getBuffer().slice(bufferLenBefore); + const cleaned = stripAnsi(snippet); + const m = cleaned.match(REMOTE_CONTROL_URL_RE); + if (m?.[0]) { + onUrl(m[0]); + return; + } + const errM = cleaned.match( + /Remote Control [^\n]*(?:requires|disabled|not enabled|not yet enabled|failed)[^\n]*/i, + ); + if (errM) { + onError(errM[0].trim()); + return; + } + } + onError("Remote Control の URL を取得できませんでした (タイムアウト)"); +} + +function stripAnsi(s: string): string { + // biome-ignore lint/suspicious/noControlCharactersInRegex: stripping real ANSI escapes from PTY output is the whole point + const csi = /\x1b\[[0-9;?]*[A-Za-z]/g; + // biome-ignore lint/suspicious/noControlCharactersInRegex: OSC terminator BEL (0x07) is the spec-defined end of an OSC sequence + const osc = /\x1b\][^\x07]*\x07/g; + return s.replace(csi, "").replace(osc, ""); +} + +// ----------------------------------------------------------------------------- +// JSONL tail +// ----------------------------------------------------------------------------- + +async function tailJsonl( + state: TurnState, + params: PtyTurnParams, +): Promise { + if (!state.jsonlPath) return; + let stat: fs.Stats; + try { + stat = fs.statSync(state.jsonlPath); + } catch { + return; + } + if (stat.size <= state.jsonlReadOffset) return; + const len = stat.size - state.jsonlReadOffset; + let buf: Buffer; + let fd: number | null = null; + try { + fd = fs.openSync(state.jsonlPath, "r"); + buf = Buffer.alloc(len); + fs.readSync(fd, buf, 0, len, state.jsonlReadOffset); + } catch { + // `finally` below handles cleanup — returning here without a + // separate `closeSync` avoids the double-close that would + // otherwise risk closing an unrelated fd if the descriptor + // was recycled between the two closes (CodeRabbit review). 
+ return; + } finally { + if (fd != null) { + try { + fs.closeSync(fd); + } catch { + /* ignore */ + } + } + } + // Find the last newline at the raw-byte level so we never split + // on a UTF-8 multibyte boundary. `Buffer.toString("utf8")` would + // replace a truncated tail with U+FFFD and make + // `Buffer.byteLength(lastLine)` disagree with the underlying bytes — + // which breaks Japanese content in verdict reasons etc. + // (CodeRabbit review). + const lastNewlineIdx = buf.lastIndexOf(0x0a); + if (lastNewlineIdx < 0) { + // No newline in this chunk — defer everything until we see one. + return; + } + const consumedBytes = lastNewlineIdx + 1; + const text = buf.subarray(0, consumedBytes).toString("utf8"); + state.jsonlReadOffset += consumedBytes; + const lines = text.split("\n"); + const events: TodoStreamEvent[] = []; + for (const line of lines) { + if (!line.trim()) continue; + let payload: unknown; + try { + payload = JSON.parse(line); + } catch { + continue; + } + const classified = classifyJsonlRecord(payload); + if (classified.sessionId && !state.claudeSessionId) { + state.claudeSessionId = classified.sessionId; + } + if (classified.scheduledWakeup) { + state.scheduledWakeup = classified.scheduledWakeup; + } + if (classified.assistantText) { + state.lastAssistantText = classified.assistantText; + } + if (classified.usage) { + state.numTurns += 1; + } + for (const e of classified.events) { + events.push({ + id: randomUUID(), + ts: Date.now(), + iteration: params.iteration, + kind: e.kind, + label: e.label, + text: e.text, + toolUseId: e.toolUseId, + parentToolUseId: e.parentToolUseId, + }); + } + } + if (events.length > 0) { + getTodoSessionStore().appendStreamEvents(params.sessionId, events); + } +} + +// ----------------------------------------------------------------------------- +// JSONL record classifier +// ----------------------------------------------------------------------------- + +interface ClassifiedJsonlRecord { + sessionId: string | null; + 
assistantText: string | null; + usage: boolean; + scheduledWakeup: { delayMs: number; reason: string | null } | null; + events: Array<{ + kind: TodoStreamEventKind; + label: string; + text: string; + toolUseId?: string; + parentToolUseId?: string; + }>; +} + +function classifyJsonlRecord(payload: unknown): ClassifiedJsonlRecord { + const empty: ClassifiedJsonlRecord = { + sessionId: null, + assistantText: null, + usage: false, + scheduledWakeup: null, + events: [], + }; + if (typeof payload !== "object" || payload === null) return empty; + const rec = payload as Record; + const type = typeof rec.type === "string" ? (rec.type as string) : ""; + // Claude Code's transcript format has varied between camelCase + // (`sessionId`, `parentToolUseId`) and snake_case (`session_id`, + // `parent_tool_use_id`) across versions. Read both so the parser + // stays correct regardless of which shape the installed CLI + // emits — otherwise sessionId ends up null and the daemon has no + // way to bind to the right JSONL (CodeRabbit review finding). + const sessionId = + typeof rec.sessionId === "string" + ? (rec.sessionId as string) + : typeof rec.session_id === "string" + ? (rec.session_id as string) + : null; + const parentToolUseId = + typeof rec.parentToolUseId === "string" + ? (rec.parentToolUseId as string) + : typeof rec.parent_tool_use_id === "string" + ? 
(rec.parent_tool_use_id as string) + : undefined; + + if (type === "assistant") { + const msg = rec.message as { content?: unknown } | undefined; + const text = extractText(msg?.content); + const tool = extractToolUse(msg?.content); + const wakeup = extractScheduledWakeup(msg?.content); + const hasUsage = + typeof msg === "object" && + msg !== null && + typeof (msg as { usage?: unknown }).usage === "object"; + const events: ClassifiedJsonlRecord["events"] = []; + if (text) { + events.push({ + kind: "assistant_text", + label: "Claude", + text, + parentToolUseId, + }); + } + if (tool) { + events.push({ + kind: "tool_use", + label: tool.label, + text: tool.text, + toolUseId: tool.id, + parentToolUseId, + }); + } + return { + sessionId, + assistantText: text, + usage: hasUsage, + scheduledWakeup: wakeup, + events, + }; + } + + if (type === "user") { + const msg = rec.message as { content?: unknown } | undefined; + const result = extractToolResult(msg?.content); + if (result) { + return { + ...empty, + sessionId, + events: [ + { + kind: "tool_result", + label: "tool result", + text: truncate(result.text, 400), + toolUseId: result.toolUseId, + parentToolUseId, + }, + ], + }; + } + return empty; + } + + if (type === "system") { + const subtype = + typeof rec.subtype === "string" ? (rec.subtype as string) : ""; + if (subtype === "init") { + return { + ...empty, + sessionId, + events: [ + { + kind: "system_init", + label: "init", + text: `session ${sessionId ?? "?"} 準備完了`, + }, + ], + }; + } + return empty; + } + + return empty; +} + +function extractText(content: unknown): string | null { + if (!Array.isArray(content)) return null; + const parts: string[] = []; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type === "text" && typeof rec.text === "string") { + parts.push(rec.text as string); + } + } + const joined = parts.join("").trim(); + return joined.length > 0 ? 
joined : null; +} + +function extractToolUse( + content: unknown, +): { label: string; text: string; id: string | undefined } | null { + if (!Array.isArray(content)) return null; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type !== "tool_use") continue; + const name = typeof rec.name === "string" ? (rec.name as string) : "tool"; + const id = typeof rec.id === "string" ? (rec.id as string) : undefined; + const input = rec.input; + return { label: name, text: summarizeToolInput(name, input), id }; + } + return null; +} + +function extractScheduledWakeup( + content: unknown, +): { delayMs: number; reason: string | null } | null { + if (!Array.isArray(content)) return null; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type !== "tool_use") continue; + if (rec.name !== "ScheduleWakeup") continue; + const input = rec.input; + if (typeof input !== "object" || input === null) continue; + const inp = input as Record; + const delaySeconds = + typeof inp.delaySeconds === "number" + ? (inp.delaySeconds as number) + : null; + if (delaySeconds == null || !Number.isFinite(delaySeconds)) continue; + const seconds = Math.floor(delaySeconds); + if (seconds < 60 || seconds > 3600) continue; + const reason = + typeof inp.reason === "string" ? 
(inp.reason as string) : null; + return { delayMs: seconds * 1000, reason }; + } + return null; +} + +function extractToolResult( + content: unknown, +): { text: string; toolUseId: string | undefined } | null { + if (!Array.isArray(content)) return null; + const parts: string[] = []; + let toolUseId: string | undefined; + let saw = false; + let imageCount = 0; + let otherCount = 0; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type !== "tool_result") continue; + saw = true; + if (!toolUseId && typeof rec.tool_use_id === "string") { + toolUseId = rec.tool_use_id as string; + } + const inner = rec.content; + if (typeof inner === "string") { + parts.push(inner); + } else if (Array.isArray(inner)) { + for (const p of inner) { + if (typeof p !== "object" || p === null) continue; + const pr = p as Record; + if (pr.type === "text" && typeof pr.text === "string") { + parts.push(pr.text as string); + } else if (pr.type === "image") { + imageCount += 1; + } else if (typeof pr.type === "string") { + otherCount += 1; + } + } + } + } + if (!saw) return null; + const joined = parts.join("\n").trim(); + if (joined.length > 0) return { text: joined, toolUseId }; + const summary: string[] = []; + if (imageCount > 0) { + summary.push(imageCount === 1 ? "[画像 1 件]" : `[画像 ${imageCount} 件]`); + } + if (otherCount > 0) { + summary.push(`[非テキストブロック ${otherCount} 件]`); + } + return { + text: summary.length > 0 ? summary.join(" ") : "(空の結果)", + toolUseId, + }; +} + +function summarizeToolInput(name: string, input: unknown): string { + if (typeof input !== "object" || input === null) return name; + const rec = input as Record; + const key = + typeof rec.command === "string" + ? (rec.command as string) + : typeof rec.file_path === "string" + ? (rec.file_path as string) + : typeof rec.path === "string" + ? (rec.path as string) + : typeof rec.pattern === "string" + ? 
(rec.pattern as string) + : typeof rec.description === "string" + ? (rec.description as string) + : null; + return key ? truncate(`${name}: ${key}`, 300) : name; +} + +function truncate(text: string, cap: number): string { + if (text.length <= cap) return text; + return `${text.slice(0, cap)}…`; +} + +// ----------------------------------------------------------------------------- +// Stream event append helpers +// ----------------------------------------------------------------------------- + +function appendRawEvent( + sessionId: string, + iteration: number, + kind: TodoStreamEventKind, + label: string, + text: string, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration, + kind, + label, + text, + }, + ]); +} + +function ptyExitError( + state: TurnState, + exit: { exitCode: number; signal?: number } | null, + ptyBuffer: string, +): PtyTurnResult { + const tail = stripAnsi(ptyBuffer).split("\n").slice(-8).join("\n").trim(); + return { + result: state.lastAssistantText, + sessionId: state.claudeSessionId, + costUsd: state.costUsd, + numTurns: state.numTurns || null, + error: `claude (PTY) が exit code ${exit?.exitCode ?? "?"} で終了しました${ + tail ? `:\n${tail}` : "" + }`, + interrupted: false, + scheduledWakeup: state.scheduledWakeup, + }; +} + +// `extractAttachmentPaths` is exported to keep the same affordance +// supervisor-engine offers; callers can pre-inspect a prompt for +// attachment chips without duplicating the regex. 
+// Scan free-form prompt text for attachment-chip paths. First-seen
+// order is preserved and duplicates are dropped.
+export function extractAttachmentPaths(
+  texts: (string | null | undefined)[],
+): string[] {
+  const seen = new Set<string>();
+  const results: string[] = [];
+  for (const candidate of texts) {
+    if (!candidate) continue;
+    for (const match of candidate.matchAll(ATTACHMENT_PATH_RE)) {
+      const attachmentPath = match[1];
+      if (!attachmentPath || seen.has(attachmentPath)) continue;
+      seen.add(attachmentPath);
+      results.push(attachmentPath);
+    }
+  }
+  return results;
+}
diff --git a/apps/desktop/src/main/todo-daemon/supervisor-engine.ts b/apps/desktop/src/main/todo-daemon/supervisor-engine.ts
new file mode 100644
index 00000000000..e6da4c8db43
--- /dev/null
+++ b/apps/desktop/src/main/todo-daemon/supervisor-engine.ts
@@ -0,0 +1,1678 @@
+import { type ChildProcess, spawn } from "node:child_process";
+import { randomUUID } from "node:crypto";
+import type { SelectTodoSession } from "@superset/local-db";
+import { getCurrentHeadSha } from "main/todo-agent/git-status";
+import { readTodoSessionRuntimeConfig } from "main/todo-agent/runtime-config";
+import {
+  getTodoSessionStore,
+  resolveWorktreePath,
+} from "main/todo-agent/session-store";
+import { getTodoSettings } from "main/todo-agent/settings";
+import {
+  CLAUDE_EFFORT_OPTIONS,
+  CLAUDE_MODEL_OPTIONS,
+  type TodoStreamEventKind,
+} from "main/todo-agent/types";
+import { runCodexTurn } from "./codex-turn-runner";
+import { runCrushTurn } from "./crush-turn-runner";
+import { runClaudeTurnPty } from "./pty-turn-runner";
+
+/**
+ * Feature flag for the interactive PTY engine. When the daemon process
+ * is launched with `TODO_ENGINE=pty`, `runClaudeTurn` dispatches to the
+ * PTY runner (apps/desktop/src/main/todo-daemon/pty-turn-runner.ts)
+ * which supports Remote Control. Otherwise, the legacy `-p` headless
+ * path is used. The flag is process-wide (not per-session) because it
+ * governs which spawn path the daemon knows how to manage; Remote
+ * Control itself is still opt-in per session via
+ * `todo_sessions.remote_control_enabled`.
+ */ +const PTY_ENGINE_ENABLED = process.env.TODO_ENGINE === "pty"; + +/** + * Daemon-side supervisor engine. Spawns `claude -p` children for TODO + * sessions and drives their iteration loop. + * + * The original in-main supervisor used to live at + * `main/todo-agent/supervisor.ts`; it has been moved here so the claude + * children survive app restarts — see issue #237. + * + * This file is intentionally kept close to the original implementation; + * all calls to `getTodoSessionStore()` write to the daemon-local SQLite + * connection, and the daemon bridge re-broadcasts those writes to the + * connected main processes over the NDJSON socket. + */ + +interface ActiveRun { + sessionId: string; + abortController: AbortController; + lastFailingTest?: string; + consecutiveSameFailure: number; + startedAt: number; + currentChild: ChildProcess | null; +} + +export class TodoSupervisorEngine { + private readonly active = new Map(); + private readonly queue: string[] = []; + /** + * Sessions whose next queued start was triggered by `ScheduleWakeup` + * firing (scheduler.resumeDueWaitingSessions), not by a user click or + * a follow-up intervention. `runSession` consumes the marker to skip + * the "セッションを再開します" banner and to send a short continuation + * prompt instead of re-replaying the original goal — which Claude + * has already been working on in the same `--resume`d session. See + * issue #240. + */ + private readonly wakeupResumeMarkers = new Set(); + + listActiveSessionIds(): string[] { + return Array.from(this.active.keys()); + } + + async start( + sessionId: string, + options?: { fromScheduledWakeup?: boolean }, + ): Promise { + if (options?.fromScheduledWakeup) { + this.wakeupResumeMarkers.add(sessionId); + } else { + // A manual start (user click / follow-up intervention) always + // overrides a stale scheduler marker. Prevents a prior wakeup + // that never actually ran (e.g. 
abort landed between claim and + // drain) from silently relabeling the next manual resume. + this.wakeupResumeMarkers.delete(sessionId); + } + if (this.queue.includes(sessionId)) return; + const active = this.active.get(sessionId); + if (active && !active.abortController.signal.aborted) return; + this.queue.push(sessionId); + this.drain(); + } + + handleSettingsChanged(): void { + this.drain(); + } + + private drain(): void { + const capacity = getTodoSettings().maxConcurrentTasks; + while (this.active.size < capacity && this.queue.length > 0) { + const next = this.queue.shift(); + if (!next) continue; + const latest = getTodoSessionStore().get(next); + if (!latest) continue; + if ( + latest.status === "aborted" || + latest.status === "failed" || + latest.status === "done" || + latest.status === "escalated" + ) { + continue; + } + void this.runSession(next) + .catch((err) => { + console.warn(`[todo-daemon] runSession crashed for ${next}:`, err); + }) + .finally(() => { + this.drain(); + }); + } + } + + abort(sessionId: string): void { + const store = getTodoSessionStore(); + const queueIdx = this.queue.indexOf(sessionId); + if (queueIdx !== -1) { + this.queue.splice(queueIdx, 1); + } + // Clear any wakeup-resume marker so a subsequent manual start + // cannot misinterpret this session as a scheduler wakeup. 
+ this.wakeupResumeMarkers.delete(sessionId); + const activeRun = this.active.get(sessionId); + if (activeRun) { + activeRun.abortController.abort(); + const child = activeRun.currentChild; + if (child?.pid) { + const pid = child.pid; + killProcessTree(pid, "SIGINT"); + const kill = setTimeout(() => { + if (child.exitCode == null && child.signalCode == null) { + killProcessTree(pid, "SIGKILL"); + } + }, 1500); + child.once("close", () => clearTimeout(kill)); + } + } + const session = store.get(sessionId); + if (!session) return; + if ( + session.status !== "done" && + session.status !== "failed" && + session.status !== "escalated" && + session.status !== "aborted" + ) { + store.update(sessionId, { + status: "aborted", + phase: "aborted", + completedAt: Date.now(), + }); + } + } + + queueIntervention(sessionId: string, data: string): void { + const store = getTodoSessionStore(); + const existing = store.get(sessionId); + if (!existing) return; + const previous = existing.pendingIntervention?.trim(); + const next = [previous, data.trim()].filter(Boolean).join("\n\n"); + store.update(sessionId, { pendingIntervention: next }); + } + + /** + * Abort every active run without flipping the `todo_sessions` status. + * Used when the daemon itself is being shut down — marking sessions + * as aborted would be a lie, since the user did not request it. + */ + shutdownAll(opts: { killChildren: boolean }): void { + for (const run of this.active.values()) { + run.abortController.abort(); + if (opts.killChildren) { + const child = run.currentChild; + if (child?.pid) killProcessTree(child.pid, "SIGINT"); + } + } + } + + private async runSession(sessionId: string): Promise { + const store = getTodoSessionStore(); + const session0 = store.get(sessionId); + if (!session0) return; + + // Consume the wakeup-resume marker (if any). 
A scheduler-driven + // resume from a `ScheduleWakeup`-paused session is not a new + // turn from Claude's perspective — Claude asked to be paged + // back later and is now continuing the same reasoning. Treat + // it differently from the user-driven done→follow-up resume. + const isFromScheduledWakeup = this.wakeupResumeMarkers.delete(sessionId); + + const isResumingPastRun = !!session0.claudeSessionId; + if (!isResumingPastRun) { + store.clearStreamEvents(sessionId); + } + store.setArtifactPathCache(sessionId, session0.artifactPath); + // Scheduler-driven wakeup resumes skip the "再開" banner — + // Claude requested the pause itself, so the pause+wakeup is a + // single logical turn and does not warrant a new-session marker. + if (isResumingPastRun && !isFromScheduledWakeup) { + appendSetupEvent( + sessionId, + "再開", + "セッションを再開します。これより下が新しいターンのストリームです。", + ); + } + + const ac = new AbortController(); + const run: ActiveRun = { + sessionId, + abortController: ac, + consecutiveSameFailure: 0, + startedAt: Date.now(), + currentChild: null, + }; + this.active.set(sessionId, run); + + try { + appendSetupEvent( + sessionId, + "セットアップ", + "ワークスペースを解決しています…", + ); + const worktreePath = resolveWorktreePath(session0.workspaceId); + if (worktreePath) { + appendSetupEvent(sessionId, "worktree", worktreePath); + } + const startHeadSha = + session0.startHeadSha ?? + (worktreePath ? 
await getCurrentHeadSha(worktreePath) : null); + if (startHeadSha) { + appendSetupEvent( + sessionId, + "開始時 HEAD", + `${startHeadSha.slice(0, 12)}`, + ); + } + if (session0.verifyCommand) { + appendSetupEvent(sessionId, "verify", session0.verifyCommand); + } else { + appendSetupEvent(sessionId, "モード", "単発タスク(外部 verify なし)"); + } + appendSetupEvent( + sessionId, + "予算", + `${session0.maxIterations} iter · ${Math.round(session0.maxWallClockSec / 60)} 分`, + ); + if (session0.customSystemPrompt?.trim()) { + const preview = session0.customSystemPrompt + .trim() + .replace(/\s+/g, " ") + .slice(0, 200); + appendSetupEvent( + sessionId, + "システムプロンプト", + `${preview}${session0.customSystemPrompt.trim().length > 200 ? "…" : ""}`, + ); + } + if (session0.claudeModel || session0.claudeEffort) { + const parts: string[] = []; + if (session0.claudeModel) parts.push(`model: ${session0.claudeModel}`); + if (session0.claudeEffort) + parts.push(`effort: ${session0.claudeEffort}`); + appendSetupEvent(sessionId, "Claude 設定", parts.join(" / ")); + } + if (session0.codexModel || session0.codexEffort) { + const parts: string[] = []; + if (session0.codexModel) parts.push(`model: ${session0.codexModel}`); + if (session0.codexEffort) parts.push(`effort: ${session0.codexEffort}`); + appendSetupEvent(sessionId, "Codex 設定", parts.join(" / ")); + } + const agentKind = + (session0.agentKind as "claude" | "codex" | null) ?? "claude"; + const runtimeConfig = readTodoSessionRuntimeConfig({ + artifactPath: session0.artifactPath, + fallbackRemoteControlEnabled: session0.remoteControlEnabled ?? 
false, + }); + const willUsePty = + agentKind === "claude" && + (PTY_ENGINE_ENABLED || runtimeConfig.ptyEnabled); + const remoteControlEnabled = + willUsePty && runtimeConfig.remoteControlEnabled; + if (agentKind === "codex") { + appendSetupEvent( + sessionId, + "Codex", + "codex exec --json --full-auto を起動します", + ); + appendSetupEvent( + sessionId, + "計測", + "Codex モードではコスト (USD) の集計はトークン数ベースになります。", + ); + } else { + appendSetupEvent( + sessionId, + "Claude", + willUsePty + ? "claude を PTY (interactive) モードで起動します" + : "claude -p --output-format stream-json を起動します", + ); + } + if (remoteControlEnabled) { + appendSetupEvent( + sessionId, + "Remote Control", + "有効 (PTY モード)。起動後に接続 URL を発行します。", + ); + } + if (willUsePty) { + // PTY 経路は Claude Code JSONL に cost_usd が載らない + // ため totalCostUsd の集計は当面行われません。ユーザー + // 可観測性のためセットアップバナーに明示します + // (CodeRabbit review #278)。 + appendSetupEvent( + sessionId, + "計測", + "PTY モードではコスト (USD) の集計が無効化されます。ターン数は計測されます。", + ); + } + + const preservedClaudeSessionId = isResumingPastRun + ? (session0.claudeSessionId ?? null) + : null; + store.update(sessionId, { + status: "running", + phase: "running", + startedAt: Date.now(), + completedAt: null, + verdictPassed: null, + verdictReason: null, + verdictFailingTest: null, + // Keep the prior assistant text on a user-driven resume + // so the Manager shows the last known answer while the + // new turn streams. On a scheduler wakeup, clear it — + // the stale response has been visible the whole time + // under the "待機中" label and the user wants a clean + // slate under the "最終回答" label once the new turn + // starts producing output (issue #240). + finalAssistantText: + isResumingPastRun && !isFromScheduledWakeup + ? (session0.finalAssistantText ?? null) + : null, + claudeSessionId: preservedClaudeSessionId, + totalCostUsd: isResumingPastRun + ? (session0.totalCostUsd ?? null) + : null, + totalNumTurns: isResumingPastRun + ? (session0.totalNumTurns ?? 
null) + : null, + iteration: 0, + startHeadSha, + waitingUntil: null, + waitingReason: null, + }); + + if (!worktreePath) { + store.update(sessionId, { + status: "failed", + phase: "failed", + verdictReason: + "ワークスペースのパスを解決できませんでした(worktree も mainRepoPath も見つからない)", + completedAt: Date.now(), + }); + return; + } + + // Wakeup resumes intentionally drop the prior assistant text + // so mid-turn failures do not resurface a stale answer as + // if it were the new turn's output. + let claudeSessionId: string | null = preservedClaudeSessionId; + let lastAssistantText: string | null = + isResumingPastRun && !isFromScheduledWakeup + ? (session0.finalAssistantText ?? null) + : null; + let aggregatedCostUsd = isResumingPastRun + ? (session0.totalCostUsd ?? 0) + : 0; + let aggregatedNumTurns = isResumingPastRun + ? (session0.totalNumTurns ?? 0) + : 0; + let iteration = 0; + + while (iteration < session0.maxIterations) { + if (ac.signal.aborted) break; + if (Date.now() - run.startedAt > session0.maxWallClockSec * 1000) { + store.update(sessionId, { + status: "escalated", + phase: "escalated", + verdictReason: "wall-clock 予算を使い切りました", + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + return; + } + + iteration += 1; + store.update(sessionId, { + iteration, + phase: "running", + }); + + const liveSession = store.get(sessionId); + const pendingIntervention = liveSession?.pendingIntervention ?? null; + if (pendingIntervention) { + store.update(sessionId, { pendingIntervention: null }); + } + + const currentSession = store.get(sessionId); + if (!currentSession) return; + + const prompt = buildIterationPrompt({ + session: currentSession, + iteration, + previousVerdictReason: currentSession.verdictReason ?? 
null, + intervention: pendingIntervention, + // Only the very first turn after the scheduler wakes us + // up is a "continuation" — subsequent iterations within + // the same runSession are normal verify-retry loops. + // Require an actual resumable session: if the parked + // turn never produced a parseable `session_id`, + // claudeSessionId is null and `--resume` will not be + // passed. In that edge case the continuation-only + // prompt would strand Claude in a fresh conversation + // with no task context — fall back to the full + // iteration-1 prompt instead. + isScheduledWakeupContinuation: + isFromScheduledWakeup && iteration === 1 && claudeSessionId != null, + }); + + appendUserEvent(sessionId, iteration, prompt); + + const turnResult = await this.runAgentTurn({ + sessionId, + iteration, + cwd: worktreePath, + prompt, + resumeSessionId: claudeSessionId, + customSystemPrompt: currentSession.customSystemPrompt ?? null, + agentKind: + (currentSession.agentKind as "claude" | "codex" | "crush" | null) ?? + "claude", + claudeModel: currentSession.claudeModel ?? null, + claudeEffort: currentSession.claudeEffort ?? null, + codexModel: currentSession.codexModel ?? null, + codexEffort: currentSession.codexEffort ?? null, + crushModel: currentSession.crushModel ?? null, + signal: ac.signal, + usePty: willUsePty, + remoteControlEnabled, + onChild: (child) => { + run.currentChild = child; + }, + }); + run.currentChild = null; + + if (ac.signal.aborted) return; + + if (turnResult.interrupted) { + if (turnResult.sessionId) { + claudeSessionId = turnResult.sessionId; + } + if (turnResult.result) { + lastAssistantText = turnResult.result; + aggregatedCostUsd += turnResult.costUsd ?? 0; + aggregatedNumTurns += turnResult.numTurns ?? 
0; + } + store.update(sessionId, { + claudeSessionId, + finalAssistantText: lastAssistantText, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + }); + continue; + } + + if (turnResult.error && !turnResult.result) { + store.update(sessionId, { + status: "failed", + phase: "failed", + verdictReason: turnResult.error, + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + return; + } + + if (turnResult.sessionId) { + claudeSessionId = turnResult.sessionId; + } + if (turnResult.result) { + lastAssistantText = turnResult.result; + aggregatedCostUsd += turnResult.costUsd ?? 0; + aggregatedNumTurns += turnResult.numTurns ?? 0; + store.update(sessionId, { + claudeSessionId, + finalAssistantText: lastAssistantText, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + }); + } + + if (!currentSession.verifyCommand) { + const postTurn = store.get(sessionId); + const hasFollowUp = + (postTurn?.pendingIntervention ?? 
"").trim().length > 0; + if (hasFollowUp) { + store.update(sessionId, { + claudeSessionId, + finalAssistantText: lastAssistantText, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + }); + continue; + } + if (turnResult.scheduledWakeup) { + const waitingUntil = + Date.now() + turnResult.scheduledWakeup.delayMs; + store.update(sessionId, { + status: "waiting", + phase: "waiting", + verdictPassed: null, + verdictReason: null, + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + waitingUntil, + waitingReason: turnResult.scheduledWakeup.reason, + completedAt: null, + }); + appendRawEvent( + sessionId, + iteration, + "system_init", + "waiting", + `ScheduleWakeup を検知。${Math.round( + turnResult.scheduledWakeup.delayMs / 1000, + )}秒後に再開します${ + turnResult.scheduledWakeup.reason + ? ` (${turnResult.scheduledWakeup.reason})` + : "" + }`, + ); + return; + } + store.update(sessionId, { + status: "done", + phase: "done", + verdictPassed: true, + verdictReason: lastAssistantText, + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + return; + } + + store.update(sessionId, { phase: "verifying" }); + const verdict = await runVerify( + currentSession.verifyCommand, + worktreePath, + ac.signal, + ); + if (ac.signal.aborted) return; + appendVerifyEvent(sessionId, iteration, verdict); + + if (verdict.passed) { + store.update(sessionId, { + status: "done", + phase: "done", + verdictPassed: true, + verdictReason: + lastAssistantText ?? 
"verify コマンドが exit 0 で完了しました", + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + return; + } + + if ( + verdict.failingTest && + verdict.failingTest === run.lastFailingTest + ) { + run.consecutiveSameFailure += 1; + } else { + run.consecutiveSameFailure = 1; + run.lastFailingTest = verdict.failingTest; + } + if (run.consecutiveSameFailure >= 3) { + store.update(sessionId, { + status: "escalated", + phase: "escalated", + verdictPassed: false, + verdictReason: `futility: ${ + verdict.failingTest ?? "同一失敗" + } が ${run.consecutiveSameFailure} 回連続で再現しました`, + verdictFailingTest: verdict.failingTest, + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + return; + } + + store.update(sessionId, { + verdictPassed: false, + verdictReason: tailForReason(verdict.log), + verdictFailingTest: verdict.failingTest, + }); + } + + if (!ac.signal.aborted) { + store.update(sessionId, { + status: "escalated", + phase: "escalated", + verdictReason: "iteration 予算を使い切りました", + finalAssistantText: lastAssistantText, + claudeSessionId, + totalCostUsd: aggregatedCostUsd || null, + totalNumTurns: aggregatedNumTurns || null, + completedAt: Date.now(), + }); + } + } finally { + this.active.delete(sessionId); + } + } + + private async runAgentTurn(params: { + sessionId: string; + iteration: number; + cwd: string; + prompt: string; + resumeSessionId: string | null; + customSystemPrompt: string | null; + agentKind: "claude" | "codex" | "crush"; + claudeModel: string | null; + claudeEffort: string | null; + codexModel: string | null; + codexEffort: string | null; + crushModel: string | null; + signal: AbortSignal; + usePty: boolean; + onChild: (child: ChildProcess) => void; + remoteControlEnabled: boolean; + }): Promise<{ + result: string | 
null; + sessionId: string | null; + costUsd: number | null; + numTurns: number | null; + error: string | null; + interrupted: boolean; + scheduledWakeup: { delayMs: number; reason: string | null } | null; + }> { + if (params.agentKind === "codex") { + const codexResult = await runCodexTurn({ + sessionId: params.sessionId, + iteration: params.iteration, + cwd: params.cwd, + prompt: params.prompt, + resumeThreadId: params.resumeSessionId, + customSystemPrompt: params.customSystemPrompt, + codexModel: params.codexModel, + codexEffort: params.codexEffort, + signal: params.signal, + onChild: params.onChild, + }); + return { + result: codexResult.result, + sessionId: codexResult.threadId, + costUsd: codexResult.costUsd, + numTurns: codexResult.numTurns, + error: codexResult.error, + interrupted: codexResult.interrupted, + scheduledWakeup: null, + }; + } + + if (params.agentKind === "crush") { + const crushResult = await runCrushTurn({ + sessionId: params.sessionId, + iteration: params.iteration, + cwd: params.cwd, + prompt: params.prompt, + resumeSessionId: params.resumeSessionId, + customSystemPrompt: params.customSystemPrompt, + crushModel: params.crushModel, + signal: params.signal, + onChild: params.onChild, + emit: (evt) => { + getTodoSessionStore().appendStreamEvents(params.sessionId, [ + { + id: evt.id, + ts: evt.ts, + iteration: evt.iteration, + kind: evt.kind, + label: evt.label, + text: evt.text, + ...(evt.toolUseId ? 
{ toolUseId: evt.toolUseId } : {}), + }, + ]); + }, + }); + return { + result: crushResult.result, + sessionId: crushResult.sessionId, + costUsd: crushResult.costUsd, + numTurns: crushResult.numTurns, + error: crushResult.error, + interrupted: crushResult.interrupted, + scheduledWakeup: null, + }; + } + + // Claude Code path + if (params.usePty) { + return runClaudeTurnPty({ + sessionId: params.sessionId, + iteration: params.iteration, + cwd: params.cwd, + prompt: params.prompt, + resumeSessionId: params.resumeSessionId, + customSystemPrompt: params.customSystemPrompt, + claudeModel: params.claudeModel, + claudeEffort: params.claudeEffort, + signal: params.signal, + remoteControlEnabled: params.remoteControlEnabled, + // The legacy caller only knows how to track a + // ChildProcess-shaped handle. The PTY runner hands + // back an opaque handle plus an `onExit` subscription; + // wrap both into a shim so `abort()` and its + // `once("close", ...)` SIGKILL-cancel path keep + // working. + onChild: (handle) => { + params.onChild(buildChildProcessShim(handle)); + }, + }); + } + return this.runClaudeTurnHeadless(params); + } + + private runClaudeTurnHeadless(params: { + sessionId: string; + iteration: number; + cwd: string; + prompt: string; + resumeSessionId: string | null; + customSystemPrompt: string | null; + claudeModel: string | null; + claudeEffort: string | null; + signal: AbortSignal; + onChild: (child: ChildProcess) => void; + }): Promise<{ + result: string | null; + sessionId: string | null; + costUsd: number | null; + numTurns: number | null; + error: string | null; + interrupted: boolean; + scheduledWakeup: { delayMs: number; reason: string | null } | null; + }> { + return new Promise((resolve) => { + const args = [ + "-p", + "--output-format", + "stream-json", + "--verbose", + "--include-partial-messages", + "--permission-mode", + "bypassPermissions", + ]; + if (params.customSystemPrompt) { + args.push("--append-system-prompt", params.customSystemPrompt); 
+ } + // Per-session Claude Code overrides. Passing `--model` / + // `--effort` only when set keeps Claude Code's own default + // resolution path intact for users who haven't picked one. + // + // Defense-in-depth whitelist: the UI already constrains + // values via `CLAUDE_*_OPTIONS`, but that validation happens + // on the render side. A corrupted / migrated row could still + // persist an unexpected string. We refuse to forward anything + // that isn't in the allow-list so the spawn call can't be + // steered by a malformed DB value. + if ( + params.claudeModel && + (CLAUDE_MODEL_OPTIONS as readonly string[]).includes(params.claudeModel) + ) { + args.push("--model", params.claudeModel); + } else if (params.claudeModel) { + console.warn( + "[todo-daemon] ignoring unknown claudeModel:", + params.claudeModel, + ); + } + if ( + params.claudeEffort && + (CLAUDE_EFFORT_OPTIONS as readonly string[]).includes( + params.claudeEffort, + ) + ) { + args.push("--effort", params.claudeEffort); + } else if (params.claudeEffort) { + console.warn( + "[todo-daemon] ignoring unknown claudeEffort:", + params.claudeEffort, + ); + } + if (params.resumeSessionId) { + args.push("--resume", params.resumeSessionId); + } + args.push(params.prompt); + + let child: ChildProcess; + try { + child = spawn("claude", args, { + cwd: params.cwd, + env: process.env, + detached: process.platform !== "win32", + }); + } catch (error) { + resolve({ + result: null, + sessionId: null, + costUsd: null, + numTurns: null, + error: + error instanceof Error + ? 
`claude を起動できませんでした: ${error.message}` + : "claude を起動できませんでした", + interrupted: false, + scheduledWakeup: null, + }); + return; + } + + params.onChild(child); + + let claudeSessionId: string | null = null; + let resultText: string | null = null; + let costUsd: number | null = null; + let numTurns: number | null = null; + let errorText: string | null = null; + let stdoutBuffer = ""; + let stderrBuffer = ""; + let settled = false; + let interruptedForIntervention = false; + let scheduledWakeup: { + delayMs: number; + reason: string | null; + } | null = null; + + const onAbort = () => { + if (child.pid) { + killProcessTree(child.pid, "SIGINT"); + } + }; + params.signal.addEventListener("abort", onAbort); + + const interventionPoll = setInterval(() => { + if (settled || params.signal.aborted) { + clearInterval(interventionPoll); + return; + } + const live = getTodoSessionStore().get(params.sessionId); + if (live?.pendingIntervention?.trim()) { + interruptedForIntervention = true; + clearInterval(interventionPoll); + appendRawEvent( + params.sessionId, + params.iteration, + "system_init", + "介入", + "ユーザ介入を検知。現在のターンを中断して介入内容で再開します…", + ); + try { + child.kill("SIGINT"); + } catch { + // ignore + } + } + }, 500); + + const settle = () => { + if (settled) return; + settled = true; + clearInterval(interventionPoll); + params.signal.removeEventListener("abort", onAbort); + if (stdoutBuffer.trim().length > 0) { + handleLine(stdoutBuffer.trim()); + stdoutBuffer = ""; + } + resolve({ + result: resultText, + sessionId: claudeSessionId, + costUsd, + numTurns, + error: interruptedForIntervention ? 
null : errorText, + interrupted: interruptedForIntervention, + scheduledWakeup, + }); + }; + + const drainLines = (chunk: string) => { + stdoutBuffer += chunk; + let newlineIdx = stdoutBuffer.indexOf("\n"); + while (newlineIdx !== -1) { + const line = stdoutBuffer.slice(0, newlineIdx).trim(); + stdoutBuffer = stdoutBuffer.slice(newlineIdx + 1); + if (line.length > 0) { + handleLine(line); + } + newlineIdx = stdoutBuffer.indexOf("\n"); + } + }; + + const handleLine = (line: string) => { + let payload: unknown; + try { + payload = JSON.parse(line); + } catch { + appendRawEvent( + params.sessionId, + params.iteration, + "raw", + "raw", + line.slice(0, 600), + ); + return; + } + const parsed = classifyStreamJson(payload); + if (parsed.sessionId && !claudeSessionId) { + claudeSessionId = parsed.sessionId; + } + if (parsed.resultText) { + resultText = parsed.resultText; + } + if (parsed.costUsd != null) { + costUsd = parsed.costUsd; + } + if (parsed.numTurns != null) { + numTurns = parsed.numTurns; + } + if (parsed.scheduledWakeup) { + scheduledWakeup = parsed.scheduledWakeup; + } + if (parsed.event) { + getTodoSessionStore().appendStreamEvents(params.sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration: params.iteration, + kind: parsed.event.kind, + label: parsed.event.label, + text: parsed.event.text, + toolUseId: parsed.event.toolUseId, + parentToolUseId: parsed.event.parentToolUseId, + }, + ]); + } + }; + + child.stdout?.setEncoding("utf8"); + child.stdout?.on("data", (chunk: string) => { + drainLines(chunk); + }); + child.stderr?.setEncoding("utf8"); + child.stderr?.on("data", (chunk: string) => { + stderrBuffer += chunk; + if (stderrBuffer.length > 16_000) { + stderrBuffer = stderrBuffer.slice(-16_000); + } + }); + + child.on("error", (err) => { + if (!errorText) { + errorText = `claude プロセスエラー: ${err.message}`; + } + settle(); + }); + child.on("close", (code) => { + if (code !== 0 && !resultText && !errorText) { + const tail = 
stderrBuffer.trim().split("\n").slice(-6).join("\n"); + errorText = `claude が exit code ${code} で終了しました${ + tail ? `:\n${tail}` : "" + }`; + } + settle(); + }); + }); + } +} + +// ============================================================================ +// Helpers +// ============================================================================ + +/** + * Thin ChildProcess façade over the opaque `{ pid, kill }` handle the + * PTY runner hands back. The supervisor only ever touches `.pid`, + * `.kill`, and `.exitCode` / `.signalCode` on its recorded child — it + * never reads stdout/stderr from this reference (those are consumed + * inside the PTY runner itself). The shim stubs out the rest as a + * minimal EventEmitter-free façade so TypeScript accepts it in place + * of the real ChildProcess. + */ +function buildChildProcessShim(handle: { + pid: number | null; + kill: () => void; + /** + * Register a callback the PTY runner invokes on spawn exit. The + * supervisor's abort path records a `once("close", ...)` listener + * to clear its 1.5s SIGKILL fallback timer — without an exit + * notification the timer always fires even when the PTY died + * cleanly, which is a best-effort `kill(-pid, SIGKILL)` against a + * potentially recycled PID (CodeRabbit review). + */ + onExit: (cb: () => void) => void; +}): ChildProcess { + let killed = false; + const closeListeners = new Set<() => void>(); + const shim = { + pid: handle.pid ?? 
undefined, + exitCode: null as number | null, + signalCode: null as NodeJS.Signals | null, + kill: (_signal?: NodeJS.Signals | number): boolean => { + if (killed) return true; + killed = true; + try { + handle.kill(); + shim.signalCode = "SIGTERM" as NodeJS.Signals; + } catch { + /* ignore */ + } + return true; + }, + once: (event: string, listener: (...args: unknown[]) => void) => { + if (event === "close" || event === "exit") { + const wrapped = () => { + closeListeners.delete(wrapped); + try { + listener(); + } catch { + /* ignore */ + } + }; + closeListeners.add(wrapped); + } + return shim; + }, + on: (event: string, listener: (...args: unknown[]) => void) => { + if (event === "close" || event === "exit") { + const wrapped = () => { + try { + listener(); + } catch { + /* ignore */ + } + }; + closeListeners.add(wrapped); + } + return shim; + }, + off: (_event: string, _listener: (...args: unknown[]) => void) => shim, + removeListener: (_event: string, _listener: (...args: unknown[]) => void) => + shim, + removeAllListeners: (_event?: string) => shim, + emit: (_event: string, ..._args: unknown[]) => false, + }; + handle.onExit(() => { + // Mark terminated so the supervisor's abort path's check + // `child.exitCode == null && child.signalCode == null` stops + // being universally true, and fire listeners in-order. + if (shim.exitCode == null) shim.exitCode = 0; + for (const cb of Array.from(closeListeners)) cb(); + }); + // The supervisor's abort path only reaches into `.pid` and `.kill()`. + // Cast through `unknown` to sidestep the structural mismatch; the + // shim's surface area is deliberately minimal and the daemon never + // inspects streams on this reference. 
+ return shim as unknown as ChildProcess; +} + +function killProcessTree(pid: number, signal: NodeJS.Signals): void { + if (process.platform === "win32") { + try { + const killer = spawn("taskkill", ["/pid", String(pid), "/T", "/F"], { + stdio: "ignore", + detached: true, + }); + killer.on("error", () => { + /* best-effort */ + }); + killer.unref(); + } catch { + // best-effort + } + return; + } + try { + process.kill(-pid, signal); + } catch { + try { + process.kill(pid, signal); + } catch { + // ignore + } + } +} + +/** + * Pull attachment file paths out of description/goal markdown. Mirrors + * the renderer regex in `TodoManager/utils/attachmentRefs` so the same + * `todo-agent/attachments/` references the UI renders as chips are the + * ones we surface to Claude as "please Read this". The regex is + * duplicated intentionally — the renderer module lives in the web + * bundle and we don't want a cross-bundle import here in the daemon. + */ +const ATTACHMENT_PATH_RE = + /!\[[^\]]*\]\(([^()\s]*[/\\]todo-agent[/\\]attachments[/\\][^)\s]+)\)/g; + +function extractAttachmentPaths( + texts: (string | null | undefined)[], +): string[] { + const seen = new Set(); + const out: string[] = []; + for (const text of texts) { + if (!text) continue; + for (const m of text.matchAll(ATTACHMENT_PATH_RE)) { + const p = m[1]; + if (!p || seen.has(p)) continue; + seen.add(p); + out.push(p); + } + } + return out; +} + +function buildIterationPrompt(params: { + session: SelectTodoSession; + iteration: number; + previousVerdictReason: string | null; + intervention: string | null; + isScheduledWakeupContinuation?: boolean; +}): string { + const { + session, + iteration, + previousVerdictReason, + intervention, + isScheduledWakeupContinuation, + } = params; + const goalPath = `.superset/todo/${session.id}/goal.md`; + const goalClause = session.goal?.trim() + ? 
"ゴール(受け入れ条件)を達成することを目指してください" + : "『やって欲しいこと』が完了した時点で完了とみなしてください"; + + const sections: string[] = []; + if (isScheduledWakeupContinuation) { + // Claude paused itself via `ScheduleWakeup` and the scheduler + // has now woken it up. The original goal and custom system + // prompt are already present in the resumed conversation — do + // not re-send them verbatim, that duplicate prompt is the + // "ゴリ押し" complaint in issue #240. A short continuation cue + // is enough; the user-visible intervention (if any) is still + // routed through the normal channel below. + sections.push( + "(予定時刻になりました。前回の続きから作業を再開してください。)", + ); + } else if (iteration === 1) { + if (session.customSystemPrompt?.trim()) { + sections.push( + `ユーザー設定のシステム指示(最優先で遵守):\n${session.customSystemPrompt.trim()}`, + ); + } + sections.push( + `${goalPath} を読んで、${goalClause}。作業ディレクトリは worktree のルートです。`, + ); + sections.push(`説明: ${session.description}`); + if (session.goal?.trim()) { + sections.push(`ゴール:\n${session.goal.trim()}`); + } + // Hoist attachment file paths out of the markdown so Claude + // doesn't have to decide on its own whether `![](…)` inside the + // description is decorative or a real artifact it should load. + // Before this nudge, image attachments were frequently ignored — + // the file was saved and the path was correct, but Claude would + // proceed without ever calling Read on it. See #247. 
+ const attachments = extractAttachmentPaths([ + session.description, + session.goal, + ]); + if (attachments.length > 0) { + sections.push( + [ + "添付ファイル(作業開始前に Read で内容を確認してください):", + ...attachments.map((p) => `- ${p}`), + ].join("\n"), + ); + } + } else { + sections.push( + `イテレーション ${iteration} です。前回の verify は失敗しました。`, + ); + if (previousVerdictReason) { + sections.push(`前回の verify 結果:\n${previousVerdictReason}`); + } + sections.push(`${goalPath} を読み直し、${goalClause}。`); + } + if (intervention) { + sections.push(`ユーザーからの介入指示(優先度: 高):\n${intervention}`); + } + if (session.verifyCommand && !isScheduledWakeupContinuation) { + sections.push( + `完了判定: 作業が終わったら、セッション終了後に supervisor が \`${session.verifyCommand}\` を実行して exit 0 を要求します。`, + ); + } + return sections.join("\n\n"); +} + +function tailForReason(log: string): string { + const tail = log.trim().split("\n").slice(-20).join("\n"); + return tail.length > 2000 ? tail.slice(-2000) : tail; +} + +interface VerifyResult { + passed: boolean; + log: string; + failingTest?: string; +} + +function runVerify( + verifyCommand: string, + cwd: string, + signal: AbortSignal, +): Promise { + return new Promise((resolve) => { + const child = spawn("sh", ["-c", verifyCommand], { + cwd, + env: process.env, + signal, + }); + let buf = ""; + child.stdout.on("data", (d) => { + buf += d.toString(); + }); + child.stderr.on("data", (d) => { + buf += d.toString(); + }); + child.on("error", (err) => { + resolve({ passed: false, log: `${err.message}\n${buf}` }); + }); + child.on("close", (code) => { + const passed = code === 0; + resolve({ + passed, + log: buf, + failingTest: passed ? 
undefined : guessFailingTest(buf), + }); + }); + }); +} + +function guessFailingTest(log: string): string | undefined { + // biome-ignore lint/suspicious/noControlCharactersInRegex: stripping real ANSI escapes from verify output is the whole point + const stripAnsi = log.replace(/\u001B\[[0-9;]*m/g, ""); + const lines = stripAnsi.split("\n"); + const patterns: RegExp[] = [ + /^\s*\(fail\)\s+(.+?)(?:\s+\[\d.*)?$/i, + /^\s*❯\s+(.+?)(?:\s+\d+ms)?$/, + /^\s*FAIL\s+(.+?)(?:\s+>\s+.+)?$/, + /^\s*✕\s+(.+?)(?:\s+\(\d+\s*ms\))?$/, + /^\s*×\s+(.+?)(?:\s+\(\d+\s*ms\))?$/, + /^\s*✗\s+(.+?)(?:\s+\(\d+\s*ms\))?$/, + /^\s*not ok \d+\s*-\s*(.+)$/, + /^\s*\d+\)\s+(?:\[[^\]]+\]\s+)?[›»>]\s+(.+)$/, + ]; + for (const line of lines) { + for (const re of patterns) { + const m = line.match(re); + if (m?.[1]) return normalizeTestId(m[1]); + } + } + const errorLine = lines.find((l) => /\b(Error|Assertion)\b.*:/.test(l)); + if (errorLine) return normalizeTestId(errorLine.trim()); + return undefined; +} + +function normalizeTestId(raw: string): string { + return raw + .trim() + .replace(/\s*\(\d+\s*ms\)\s*$/, "") + .replace(/\s*\[\d+(?:\.\d+)?\s*m?s\]\s*$/, "") + .replace(/@0x[0-9a-f]+/gi, "@0x?") + .replace(/:\s*expected.*$/i, "") + .slice(0, 240); +} + +// ----- stream-json parsing --------------------------------------------------- + +interface ClassifiedEvent { + kind: TodoStreamEventKind; + label: string; + text: string; + toolUseId?: string; + parentToolUseId?: string; +} + +interface ClassifiedLine { + sessionId: string | null; + resultText: string | null; + costUsd: number | null; + numTurns: number | null; + event: ClassifiedEvent | null; + scheduledWakeup: { delayMs: number; reason: string | null } | null; +} + +function classifyStreamJson(payload: unknown): ClassifiedLine { + const empty: ClassifiedLine = { + sessionId: null, + resultText: null, + costUsd: null, + numTurns: null, + event: null, + scheduledWakeup: null, + }; + if (typeof payload !== "object" || payload === null) 
return empty; + const rec = payload as Record; + const type = typeof rec.type === "string" ? (rec.type as string) : ""; + const sessionId = + typeof rec.session_id === "string" ? (rec.session_id as string) : null; + const parentToolUseId = + typeof rec.parent_tool_use_id === "string" + ? (rec.parent_tool_use_id as string) + : undefined; + + if (type === "system" && rec.subtype === "init") { + return { + ...empty, + sessionId, + event: { + kind: "system_init", + label: "init", + text: `session ${sessionId ?? "?"} 準備完了`, + }, + }; + } + + if (type === "assistant") { + const text = extractAssistantText(rec.message); + const tool = extractToolUseSummary(rec.message); + const wakeup = extractScheduledWakeup(rec.message); + if (text) { + return { + ...empty, + sessionId, + event: { + kind: "assistant_text", + label: "Claude", + text, + parentToolUseId, + }, + scheduledWakeup: wakeup, + }; + } + if (tool) { + return { + ...empty, + sessionId, + event: { + kind: "tool_use", + label: tool.label, + text: tool.text, + toolUseId: tool.id, + parentToolUseId, + }, + scheduledWakeup: wakeup, + }; + } + return empty; + } + + if (type === "user") { + const result = extractToolResultDetails(rec.message); + if (result) { + return { + ...empty, + sessionId, + event: { + kind: "tool_result", + label: "tool result", + text: truncate(result.text, 400), + toolUseId: result.toolUseId, + parentToolUseId, + }, + }; + } + return empty; + } + + if (type === "result") { + const resultText = + typeof rec.result === "string" ? (rec.result as string) : null; + const costUsd = + typeof rec.total_cost_usd === "number" + ? (rec.total_cost_usd as number) + : null; + const numTurns = + typeof rec.num_turns === "number" ? (rec.num_turns as number) : null; + return { + sessionId, + resultText, + costUsd, + numTurns, + event: { + kind: "result", + label: "result", + text: resultText ?? 
"(空の結果)", + }, + scheduledWakeup: null, + }; + } + + if ( + type === "error" || + (typeof rec.subtype === "string" && rec.subtype === "error") + ) { + const message = + typeof rec.error === "string" + ? (rec.error as string) + : JSON.stringify(rec).slice(0, 400); + return { + ...empty, + sessionId, + event: { kind: "error", label: "error", text: message }, + }; + } + + return empty; +} + +function extractAssistantText(message: unknown): string | null { + if (typeof message !== "object" || message === null) return null; + const content = (message as { content?: unknown }).content; + if (!Array.isArray(content)) return null; + const parts: string[] = []; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type === "text" && typeof rec.text === "string") { + parts.push(rec.text as string); + } + } + const joined = parts.join("").trim(); + return joined.length > 0 ? joined : null; +} + +function extractToolUseSummary( + message: unknown, +): { label: string; text: string; id: string | undefined } | null { + if (typeof message !== "object" || message === null) return null; + const content = (message as { content?: unknown }).content; + if (!Array.isArray(content)) return null; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type !== "tool_use") continue; + const name = typeof rec.name === "string" ? (rec.name as string) : "tool"; + const id = typeof rec.id === "string" ? 
(rec.id as string) : undefined; + const input = rec.input; + const inputSummary = summarizeToolInput(name, input); + return { label: name, text: inputSummary, id }; + } + return null; +} + +function extractScheduledWakeup( + message: unknown, +): { delayMs: number; reason: string | null } | null { + if (typeof message !== "object" || message === null) return null; + const content = (message as { content?: unknown }).content; + if (!Array.isArray(content)) return null; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type !== "tool_use") continue; + if (rec.name !== "ScheduleWakeup") continue; + const input = rec.input; + if (typeof input !== "object" || input === null) continue; + const inp = input as Record; + const delaySeconds = + typeof inp.delaySeconds === "number" + ? (inp.delaySeconds as number) + : null; + if (delaySeconds == null || !Number.isFinite(delaySeconds)) continue; + const seconds = Math.floor(delaySeconds); + if (seconds < 60 || seconds > 3600) continue; + const reason = + typeof inp.reason === "string" ? 
(inp.reason as string) : null; + return { delayMs: seconds * 1000, reason }; + } + return null; +} + +function extractToolResultDetails( + message: unknown, +): { text: string; toolUseId: string | undefined } | null { + if (typeof message !== "object" || message === null) return null; + const content = (message as { content?: unknown }).content; + if (!Array.isArray(content)) return null; + const parts: string[] = []; + let toolUseId: string | undefined; + let sawToolResult = false; + let imageCount = 0; + let otherBlockCount = 0; + for (const part of content) { + if (typeof part !== "object" || part === null) continue; + const rec = part as Record; + if (rec.type === "tool_result") { + sawToolResult = true; + if (!toolUseId && typeof rec.tool_use_id === "string") { + toolUseId = rec.tool_use_id as string; + } + const inner = rec.content; + if (typeof inner === "string") { + parts.push(inner); + } else if (Array.isArray(inner)) { + for (const p of inner) { + if (typeof p !== "object" || p === null) continue; + const pr = p as Record; + if (pr.type === "text" && typeof pr.text === "string") { + parts.push(pr.text as string); + } else if (pr.type === "image") { + imageCount += 1; + } else if (typeof pr.type === "string") { + otherBlockCount += 1; + } + } + } + } + } + // Bail only when the message didn't contain a tool_result block at + // all. If it did, emit the result even when it carried no text so + // the UI can pair it with its tool_use — otherwise e.g. Read on an + // image file (which returns only `image` blocks) leaves the card + // spinning "実行中…" forever even though Claude already processed + // the result and moved on to subsequent tool calls. See #247. + if (!sawToolResult) return null; + const joined = parts.join("\n").trim(); + if (joined.length > 0) return { text: joined, toolUseId }; + const summary: string[] = []; + if (imageCount > 0) { + summary.push(imageCount === 1 ? 
"[画像 1 件]" : `[画像 ${imageCount} 件]`); + } + if (otherBlockCount > 0) { + summary.push(`[非テキストブロック ${otherBlockCount} 件]`); + } + return { + text: summary.length > 0 ? summary.join(" ") : "(空の結果)", + toolUseId, + }; +} + +function summarizeToolInput(name: string, input: unknown): string { + if (typeof input !== "object" || input === null) { + return name; + } + const rec = input as Record; + const key = + typeof rec.command === "string" + ? (rec.command as string) + : typeof rec.file_path === "string" + ? (rec.file_path as string) + : typeof rec.path === "string" + ? (rec.path as string) + : typeof rec.pattern === "string" + ? (rec.pattern as string) + : typeof rec.description === "string" + ? (rec.description as string) + : null; + return key ? truncate(`${name}: ${key}`, 300) : name; +} + +function truncate(text: string, cap: number): string { + if (text.length <= cap) return text; + return `${text.slice(0, cap)}…`; +} + +function appendSetupEvent( + sessionId: string, + label: string, + text: string, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration: 0, + kind: "system_init", + label, + text, + }, + ]); +} + +function appendUserEvent( + sessionId: string, + iteration: number, + prompt: string, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration, + kind: "raw", + label: + iteration === 1 ? "最初のプロンプト" : `イテレーション ${iteration}`, + text: truncate(prompt, 4000), + }, + ]); +} + +function appendVerifyEvent( + sessionId: string, + iteration: number, + verdict: VerifyResult, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration, + kind: verdict.passed ? "result" : "error", + label: verdict.passed ? 
"verify pass" : "verify fail", + text: truncate(verdict.log || "(no output)", 1200), + }, + ]); +} + +function appendRawEvent( + sessionId: string, + iteration: number, + kind: TodoStreamEventKind, + label: string, + text: string, +): void { + getTodoSessionStore().appendStreamEvents(sessionId, [ + { + id: randomUUID(), + ts: Date.now(), + iteration, + kind, + label, + text, + }, + ]); +} diff --git a/apps/desktop/src/main/windows/main.ts b/apps/desktop/src/main/windows/main.ts index 4867b91d227..ad5d2cda604 100644 --- a/apps/desktop/src/main/windows/main.ts +++ b/apps/desktop/src/main/windows/main.ts @@ -1,8 +1,10 @@ +import type { Server } from "node:http"; import { join } from "node:path"; -import { workspaces, worktrees } from "@superset/local-db"; +import * as Sentry from "@sentry/electron/main"; +import { projects, workspaces, worktrees } from "@superset/local-db"; import { eq } from "drizzle-orm"; import type { BrowserWindow } from "electron"; -import { app, Notification, nativeTheme } from "electron"; +import { app, Notification, nativeTheme, webContents } from "electron"; import { createWindow } from "lib/electron-app/factories/windows/create"; import { createAppRouter } from "lib/trpc/routers"; import { localDb } from "main/lib/local-db"; @@ -18,6 +20,7 @@ import { appState } from "../lib/app-state"; import { browserManager } from "../lib/browser/browser-manager"; import { createApplicationMenu } from "../lib/menu"; import { playNotificationSound } from "../lib/notification-sound"; +import { playAivisNotification } from "../lib/notifications/aivis-tts"; import { NotificationManager } from "../lib/notifications/notification-manager"; import { notificationsApp, @@ -28,8 +31,15 @@ import { getNotificationTitle, getWorkspaceName, } from "../lib/notifications/utils"; +import { + applyVibrancy, + DEFAULT_VIBRANCY_STATE, + getInitialWindowOptions as getInitialVibrancyOptions, +} from "../lib/vibrancy"; +import { windowManager } from "../lib/window-manager"; 
import { getInitialWindowBounds, + isWindowPositionPersistenceEnabled, loadWindowState, saveWindowState, } from "../lib/window-state"; @@ -38,29 +48,98 @@ import { getWorkspaceRuntimeRegistry } from "../lib/workspace-runtime"; // Singleton IPC handler to prevent duplicate handlers on window reopen (macOS) let ipcHandler: ReturnType | null = null; -function getWorkspaceNameFromDb(workspaceId: string | undefined): string { - if (!workspaceId) return "Workspace"; +function getWorkspaceRecords(workspaceId: string | undefined) { + if (!workspaceId) return { workspace: null, worktree: null, project: null }; try { - const workspace = localDb - .select() - .from(workspaces) - .where(eq(workspaces.id, workspaceId)) - .get(); + const workspace = + localDb + .select() + .from(workspaces) + .where(eq(workspaces.id, workspaceId)) + .get() ?? null; const worktree = workspace?.worktreeId - ? localDb + ? (localDb .select() .from(worktrees) .where(eq(worktrees.id, workspace.worktreeId)) - .get() - : undefined; - return getWorkspaceName({ workspace, worktree }); + .get() ?? null) + : null; + const project = workspace?.projectId + ? (localDb + .select() + .from(projects) + .where(eq(projects.id, workspace.projectId)) + .get() ?? 
null) + : null; + return { workspace, worktree, project }; } catch (error) { - console.error("[notifications] Failed to get workspace name:", error); - return "Workspace"; + console.error("[notifications] Failed to read workspace records:", error); + return { workspace: null, worktree: null, project: null }; } } +function getWorkspaceNameFromDb(workspaceId: string | undefined): string { + const { workspace, worktree } = getWorkspaceRecords(workspaceId); + return getWorkspaceName({ workspace, worktree }); +} + +function buildAivisVars(event: AgentLifecycleEvent) { + const { workspace, worktree, project } = getWorkspaceRecords( + event.workspaceId, + ); + const tabs = appState.data?.tabsState?.tabs; + const panes = appState.data?.tabsState?.panes; + const tab = event.tabId ? tabs?.find((t) => t.id === event.tabId) : undefined; + const pane = event.paneId ? panes?.[event.paneId] : undefined; + const branch = workspace?.branch ?? worktree?.branch ?? ""; + const worktreeName = worktree?.branch ?? ""; + return { + branch, + workspace: workspace?.name || branch || "", + worktree: worktreeName, + project: project?.name ?? "", + tab: (tab?.userTitle?.trim() || tab?.name) ?? "", + pane: pane?.name ?? "", + event: event.eventType, + }; +} + let currentWindow: BrowserWindow | null = null; +let mainWindowCleanup: (() => void) | null = null; +let notificationsInitialized = false; +let notificationsServer: Server | null = null; +let notificationManager: NotificationManager | null = null; +let agentLifecycleListener: ((event: AgentLifecycleEvent) => void) | null = + null; +let terminalExitListener: + | ((event: { + paneId: string; + exitCode: number; + signal?: number; + reason?: "killed" | "exited" | "error"; + }) => void) + | null = null; + +/** Tear down main window resources (notification server, IPC, etc.) + * without destroying the BrowserWindow itself. Called from before-quit + * tray-stay-alive path where win.destroy() skips close events. 
*/ +export function cleanupMainWindowResources(): void { + mainWindowCleanup?.(); + mainWindowCleanup = null; + cleanupNotifications(); +} + +function addWindowLifecycleBreadcrumb( + message: string, + data?: Record, +): void { + Sentry.addBreadcrumb({ + category: "window.lifecycle", + level: "info", + message, + data, + }); +} // Routers receive this getter so they always see the current window, not a stale reference const getWindow = () => currentWindow; @@ -70,12 +149,23 @@ const getWindow = () => currentWindow; const forceRepaint = (win: BrowserWindow) => { if (win.isDestroyed()) return; win.webContents.invalidate(); - if (win.isMaximized() || win.isFullScreen()) return; - const [width, height] = win.getSize(); - win.setSize(width + 1, height); - setTimeout(() => { - if (!win.isDestroyed()) win.setSize(width, height); - }, 32); + if (win.isFullScreen()) { + win.setFullScreen(false); + setTimeout(() => { + if (!win.isDestroyed()) win.setFullScreen(true); + }, 100); + } else if (win.isMaximized()) { + win.unmaximize(); + setTimeout(() => { + if (!win.isDestroyed()) win.maximize(); + }, 100); + } else { + const [width, height] = win.getSize(); + win.setSize(width + 1, height); + setTimeout(() => { + if (!win.isDestroyed()) win.setSize(width, height); + }, 32); + } }; // GPU process restarts don't repaint existing compositor layers automatically. @@ -87,9 +177,129 @@ app.on("child-process-gone", (_event, details) => { } }); +// Re-apply vibrancy when the OS dark/light appearance changes. The +// computed setBackgroundColor depends on isDark so the window would +// otherwise keep the previous tint until the user interacted with the +// vibrancy settings again. Only relevant on macOS, but nativeTheme is +// harmless to subscribe to on other platforms. +nativeTheme.on("updated", () => { + const isDark = nativeTheme.shouldUseDarkColors; + const vibrancyState = appState.data?.vibrancyState ?? 
DEFAULT_VIBRANCY_STATE; + for (const win of windowManager.getAll().values()) { + applyVibrancy(win, vibrancyState, isDark); + } +}); + +export function initNotifications(): void { + if (notificationsInitialized) return; + + notificationManager = new NotificationManager({ + isSupported: () => Notification.isSupported(), + createNotification: (opts) => new Notification(opts), + playSound: playNotificationSound, + playAivis: (event) => { + const kind = + event.eventType === "PermissionRequest" ? "permission" : "complete"; + void playAivisNotification(kind, buildAivisVars(event)); + }, + onNotificationClick: (ids) => { + const window = getWindow(); + if (window && !window.isDestroyed()) { + window.show(); + window.focus(); + } else { + app.emit("activate"); + } + notificationsEmitter.emit(NOTIFICATION_EVENTS.FOCUS_TAB, ids); + }, + getVisibilityContext: () => { + const window = getWindow(); + const windowIsReady = window && !window.isDestroyed(); + return { + isFocused: windowIsReady ? window.isFocused() : false, + currentWorkspaceId: windowIsReady + ? 
extractWorkspaceIdFromUrl(window.webContents.getURL()) + : null, + tabsState: appState.data?.tabsState, + }; + }, + getWorkspaceName: getWorkspaceNameFromDb, + getNotificationTitle: (event) => + getNotificationTitle({ + tabId: event.tabId, + paneId: event.paneId, + tabs: appState.data?.tabsState?.tabs, + panes: appState.data?.tabsState?.panes, + }), + }); + notificationManager.start(); + + agentLifecycleListener = (event: AgentLifecycleEvent) => { + notificationManager?.handleAgentLifecycle(event); + }; + notificationsEmitter.on( + NOTIFICATION_EVENTS.AGENT_LIFECYCLE, + agentLifecycleListener, + ); + + terminalExitListener = (event) => { + notificationsEmitter.emit(NOTIFICATION_EVENTS.TERMINAL_EXIT, { + paneId: event.paneId, + exitCode: event.exitCode, + signal: event.signal, + reason: event.reason, + }); + }; + getWorkspaceRuntimeRegistry() + .getDefault() + .terminal.on("terminalExit", terminalExitListener); + + notificationsServer = notificationsApp.listen( + env.DESKTOP_NOTIFICATIONS_PORT, + "127.0.0.1", + () => { + console.log( + `[notifications] Listening on http://127.0.0.1:${env.DESKTOP_NOTIFICATIONS_PORT}`, + ); + }, + ); + + notificationsInitialized = true; +} + +function cleanupNotifications(): void { + if (!notificationsInitialized) return; + + if (agentLifecycleListener) { + notificationsEmitter.off( + NOTIFICATION_EVENTS.AGENT_LIFECYCLE, + agentLifecycleListener, + ); + agentLifecycleListener = null; + } + + if (terminalExitListener) { + getWorkspaceRuntimeRegistry() + .getDefault() + .terminal.off("terminalExit", terminalExitListener); + terminalExitListener = null; + } + + notificationManager?.dispose(); + notificationManager = null; + + notificationsServer?.close(); + notificationsServer = null; + + notificationsInitialized = false; +} + export async function MainWindow() { + const shouldPersistWindowPosition = isWindowPositionPersistenceEnabled(); const savedWindowState = loadWindowState(); - const initialBounds = 
getInitialWindowBounds(savedWindowState); + const initialBounds = getInitialWindowBounds(savedWindowState, { + restorePosition: shouldPersistWindowPosition, + }); let persistedZoomLevel = savedWindowState?.zoomLevel; const isDev = env.NODE_ENV === "development"; @@ -98,6 +308,13 @@ export async function MainWindow() { ? `${productName} — ${workspaceName}` : productName; + const initialVibrancyState = + appState.data?.vibrancyState ?? DEFAULT_VIBRANCY_STATE; + const vibrancyWindowOptions = getInitialVibrancyOptions( + initialVibrancyState, + nativeTheme.shouldUseDarkColors, + ); + const window = createWindow({ id: "main", title: windowTitle, @@ -108,7 +325,7 @@ export async function MainWindow() { minWidth: 400, minHeight: 400, show: false, - backgroundColor: nativeTheme.shouldUseDarkColors ? "#252525" : "#ffffff", + ...vibrancyWindowOptions, center: initialBounds.center, movable: true, resizable: true, @@ -129,6 +346,7 @@ export async function MainWindow() { createApplicationMenu(); currentWindow = window; + windowManager.register("main", window); // macOS Sequoia+: background throttling can corrupt GPU compositor layers if (PLATFORM.IS_MAC) { @@ -139,84 +357,34 @@ export async function MainWindow() { ipcHandler.attachWindow(window); } else { ipcHandler = createIPCHandler({ - router: createAppRouter(getWindow), + router: createAppRouter(getWindow, windowManager), windows: [window], }); + windowManager.setIpcHandler(ipcHandler); } - const server = notificationsApp.listen( - env.DESKTOP_NOTIFICATIONS_PORT, - "127.0.0.1", - () => { - console.log( - `[notifications] Listening on http://127.0.0.1:${env.DESKTOP_NOTIFICATIONS_PORT}`, - ); - }, - ); - - const notificationManager = new NotificationManager({ - isSupported: () => Notification.isSupported(), - createNotification: (opts) => new Notification(opts), - playSound: playNotificationSound, - onNotificationClick: (ids) => { - window.show(); - window.focus(); - notificationsEmitter.emit(NOTIFICATION_EVENTS.FOCUS_TAB, 
ids); - }, - getVisibilityContext: () => ({ - isFocused: window.isFocused(), - currentWorkspaceId: extractWorkspaceIdFromUrl( - window.webContents.getURL(), - ), - tabsState: appState.data?.tabsState, - }), - getWorkspaceName: getWorkspaceNameFromDb, - getNotificationTitle: (event) => - getNotificationTitle({ - tabId: event.tabId, - paneId: event.paneId, - tabs: appState.data?.tabsState?.tabs, - panes: appState.data?.tabsState?.panes, - }), - }); - notificationManager.start(); - - notificationsEmitter.on( - NOTIFICATION_EVENTS.AGENT_LIFECYCLE, - (event: AgentLifecycleEvent) => { - notificationManager.handleAgentLifecycle(event); - }, - ); - - // Forward low-volume terminal lifecycle events to the renderer via the existing - // notifications subscription. This is used only for correctness (e.g. clearing - // stuck agent lifecycle statuses when terminal panes aren't mounted). - getWorkspaceRuntimeRegistry() - .getDefault() - .terminal.on( - "terminalExit", - (event: { - paneId: string; - exitCode: number; - signal?: number; - reason?: "killed" | "exited" | "error"; - }) => { - notificationsEmitter.emit(NOTIFICATION_EVENTS.TERMINAL_EXIT, { - paneId: event.paneId, - exitCode: event.exitCode, - signal: event.signal, - reason: event.reason, - }); - }, - ); - - // macOS Sequoia+: occluded/minimized windows can lose compositor layers + // macOS Sequoia+: occluded/minimized windows can lose compositor layers, + // and NSVisualEffectView's vibrancy/native blur can detach while the + // window is in the Dock — restoring without re-applying leaves the + // window opaque even though the user still has vibrancy enabled. if (PLATFORM.IS_MAC) { + const reapplyVibrancyOnReshow = () => { + if (window.isDestroyed()) return; + applyVibrancy( + window, + appState.data?.vibrancyState ?? 
DEFAULT_VIBRANCY_STATE, + nativeTheme.shouldUseDarkColors, + ); + }; window.on("restore", () => { + addWindowLifecycleBreadcrumb("main window restored"); window.webContents.invalidate(); + reapplyVibrancyOnReshow(); }); window.on("show", () => { + addWindowLifecycleBreadcrumb("main window shown"); window.webContents.invalidate(); + reapplyVibrancyOnReshow(); }); } @@ -227,28 +395,34 @@ export async function MainWindow() { let initialized = false; let hasCompletedFirstLoad = false; let saveTimeout: ReturnType | null = null; + + const getWindowStateSnapshot = () => { + const isMaximized = window.isMaximized(); + const bounds = isMaximized ? window.getNormalBounds() : window.getBounds(); + const zoomLevel = window.webContents.getZoomLevel(); + return { + x: shouldPersistWindowPosition ? bounds.x : 0, + y: shouldPersistWindowPosition ? bounds.y : 0, + width: bounds.width, + height: bounds.height, + isMaximized, + zoomLevel, + }; + }; + const debouncedSave = () => { if (!initialized || window.isDestroyed()) return; if (saveTimeout) clearTimeout(saveTimeout); saveTimeout = setTimeout(() => { if (window.isDestroyed()) return; - const isMaximized = window.isMaximized(); - const bounds = isMaximized - ? 
window.getNormalBounds() - : window.getBounds(); - const zoomLevel = window.webContents.getZoomLevel(); - saveWindowState({ - x: bounds.x, - y: bounds.y, - width: bounds.width, - height: bounds.height, - isMaximized, - zoomLevel, - }); - persistedZoomLevel = zoomLevel; + const state = getWindowStateSnapshot(); + saveWindowState(state); + persistedZoomLevel = state.zoomLevel; }, 500); }; - window.on("move", debouncedSave); + if (shouldPersistWindowPosition) { + window.on("move", debouncedSave); + } window.on("resize", debouncedSave); window.webContents.on("zoom-changed", () => { setTimeout(() => { @@ -265,6 +439,14 @@ export async function MainWindow() { window.webContents.setZoomLevel(persistedZoomLevel); } + // Re-apply vibrancy now that the window is actually on-screen so the + // native CIGaussianBlur addon has a real NSVisualEffectView to mutate. + applyVibrancy( + window, + appState.data?.vibrancyState ?? DEFAULT_VIBRANCY_STATE, + nativeTheme.shouldUseDarkColors, + ); + if (!hasCompletedFirstLoad) { if (initialBounds.isMaximized) { window.maximize(); @@ -288,7 +470,19 @@ export async function MainWindow() { ); window.webContents.on("render-process-gone", (_event, details) => { + addWindowLifecycleBreadcrumb("renderer process gone", { + reason: details.reason, + exitCode: details.exitCode, + }); console.error("[main-window] Renderer process gone:", details); + if (window.isDestroyed()) return; + + if (details.reason === "oom") { + app.relaunch(); + app.exit(0); + } else if (details.reason !== "clean-exit") { + window.webContents.reload(); + } }); window.webContents.on("preload-error", (_event, preloadPath, error) => { @@ -297,29 +491,53 @@ export async function MainWindow() { console.error(` Error:`, error); }); - window.on("close", () => { - // Save window state first, before any cleanup - const isMaximized = window.isMaximized(); - const bounds = isMaximized ? 
window.getNormalBounds() : window.getBounds(); - const zoomLevel = window.webContents.getZoomLevel(); - saveWindowState({ - x: bounds.x, - y: bounds.y, - width: bounds.width, - height: bounds.height, - isMaximized, - zoomLevel, + // Handle mouse back/forward buttons for webview panes (Windows/Linux). + // `app-command` is not supported on macOS; macOS mouse buttons are handled + // via executeJavaScript injection in usePersistentWebview's dom-ready handler. + window.on("app-command", (_event, command) => { + const focusedGuest = webContents + .getAllWebContents() + .find((wc) => wc.getType() === "webview" && wc.isFocused()); + if (!focusedGuest) return; + + if (command === "browser-backward") { + focusedGuest.navigationHistory.goBack(); + } else if (command === "browser-forward") { + focusedGuest.navigationHistory.goForward(); + } + }); + + window.on("close", (event) => { + addWindowLifecycleBreadcrumb("main window closing", { + isDestroyed: window.isDestroyed(), + isVisible: window.isVisible(), }); - persistedZoomLevel = zoomLevel; + // Save window state first, before any cleanup + const state = getWindowStateSnapshot(); + saveWindowState(state); + persistedZoomLevel = state.zoomLevel; + // macOS: hide instead of destroy so "Open Superset" can reshow instantly. + // The quit flow uses app.exit(0) which bypasses close events entirely, + // so this hide path only runs for Cmd+W / red-X. 
+ if (PLATFORM.IS_MAC) { + event.preventDefault(); + window.hide(); + return; + } + + doCleanup(); + }); + + function doCleanup() { browserManager.unregisterAll(); - server.close(); - notificationManager.dispose(); - notificationsEmitter.removeAllListeners(); - getWorkspaceRuntimeRegistry().getDefault().terminal.detachAllListeners(); ipcHandler?.detachWindow(window); + windowManager.unregister("main"); currentWindow = null; - }); + mainWindowCleanup = null; + } + + mainWindowCleanup = doCleanup; return window; } diff --git a/apps/desktop/src/preload/index.ts b/apps/desktop/src/preload/index.ts index 8a8ffcb6f28..0e3a3baf7e3 100644 --- a/apps/desktop/src/preload/index.ts +++ b/apps/desktop/src/preload/index.ts @@ -15,10 +15,29 @@ declare global { } } +// Tearoff: synchronously fetch tab data BEFORE React/Zustand initialize +const tearoffWindowId = (() => { + const arg = process.argv.find((a) => a.startsWith("--tearoff-window-id=")); + return arg ? arg.split("=")[1] : null; +})(); +// biome-ignore lint/suspicious/noExplicitAny: tearoff data is untyped at preload level +const tearoffData: any = tearoffWindowId + ? ipcRenderer.sendSync("get-tearoff-data", tearoffWindowId) + : null; + +// Synchronously fetch auth token for tearoff windows (skips async hydration) +const tearoffAuthToken: { token: string; expiresAt: string } | null = + tearoffWindowId + ? ipcRenderer.sendSync("get-tearoff-auth-token", tearoffWindowId) + : null; + const API = { sayHelloFromBridge: () => console.log("\nHello from bridgeAPI! 
👋\n\n"), username: process.env.USER, appVersion: __APP_VERSION__, + tearoffWindowId, + tearoffData, + tearoffAuthToken, }; // Store mapping of user listeners to wrapped listeners for proper cleanup diff --git a/apps/desktop/src/preload/webview-compat.ts b/apps/desktop/src/preload/webview-compat.ts new file mode 100644 index 00000000000..1bd5b831ccb --- /dev/null +++ b/apps/desktop/src/preload/webview-compat.ts @@ -0,0 +1,21 @@ +import { webFrame } from "electron"; + +// react-dropzone (via file-selector) prefers DataTransferItem.getAsFileSystemHandle() +// on drop and then calls FileSystemFileHandle.getFile(), which raises NotAllowedError +// inside Electron guest web contents. Returning null from getAsFileSystemHandle() +// triggers the legacy DataTransferItem.getAsFile() / webkitGetAsEntry() fallback. +void webFrame + .executeJavaScript( + `(() => { + const proto = typeof DataTransferItem !== "undefined" ? DataTransferItem.prototype : null; + if (proto && typeof proto.getAsFileSystemHandle === "function") { + proto.getAsFileSystemHandle = async function() { return null; }; + } + })();`, + ) + .catch((error) => { + console.warn( + "[webview-compat] Failed to install getAsFileSystemHandle shim", + error, + ); + }); diff --git a/apps/desktop/src/renderer/assets/aivis-models/zonoko.jpg b/apps/desktop/src/renderer/assets/aivis-models/zonoko.jpg new file mode 100644 index 00000000000..bc66fb6f40a Binary files /dev/null and b/apps/desktop/src/renderer/assets/aivis-models/zonoko.jpg differ diff --git a/apps/desktop/src/renderer/assets/aivis-models/zonoko.wav b/apps/desktop/src/renderer/assets/aivis-models/zonoko.wav new file mode 100644 index 00000000000..7b5a8f61276 Binary files /dev/null and b/apps/desktop/src/renderer/assets/aivis-models/zonoko.wav differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.jpg" new file mode 100644 index 
00000000000..8f9370e15f5 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.wav" "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.wav" new file mode 100644 index 00000000000..fe897f13a50 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\204.wav" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.jpg" new file mode 100644 index 00000000000..a7f20d48ca1 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.m4a" "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.m4a" new file mode 100644 index 00000000000..08ebcfad59e Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\201\276\343\201\212.m4a" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.jpg" new file mode 100644 index 00000000000..a76cc3bedb7 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.wav" "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.wav" new file mode 100644 index 00000000000..3e69ee93eb1 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\213\343\201\252.wav" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.jpg" 
"b/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.jpg" new file mode 100644 index 00000000000..6225fdf1914 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.m4a" "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.m4a" new file mode 100644 index 00000000000..638d5f8b7c3 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\343\202\263\343\203\217\343\202\257.m4a" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.jpg" new file mode 100644 index 00000000000..39f028bd57e Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.m4a" "b/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.m4a" new file mode 100644 index 00000000000..f19ae61acfd Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\344\270\2552.m4a" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.jpg" new file mode 100644 index 00000000000..4159de21dfc Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.wav" "b/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.wav" new file mode 100644 index 00000000000..81cbe161e83 Binary files /dev/null and 
"b/apps/desktop/src/renderer/assets/aivis-models/\345\244\251\346\267\261\343\202\267\343\203\216.wav" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.jpg" new file mode 100644 index 00000000000..4320c142763 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.wav" "b/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.wav" new file mode 100644 index 00000000000..841d7eb0d91 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\346\241\234\351\237\263.wav" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.jpg" "b/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.jpg" new file mode 100644 index 00000000000..e6df2f22f0b Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.jpg" differ diff --git "a/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.wav" "b/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.wav" new file mode 100644 index 00000000000..244c348f117 Binary files /dev/null and "b/apps/desktop/src/renderer/assets/aivis-models/\350\212\261\351\237\263.wav" differ diff --git a/apps/desktop/src/renderer/components/BootErrorBoundary/BootErrorBoundary.tsx b/apps/desktop/src/renderer/components/BootErrorBoundary/BootErrorBoundary.tsx index 15c347bdba4..57bb6956475 100644 --- a/apps/desktop/src/renderer/components/BootErrorBoundary/BootErrorBoundary.tsx +++ b/apps/desktop/src/renderer/components/BootErrorBoundary/BootErrorBoundary.tsx @@ -1,5 +1,6 @@ -import type { ReactNode } from "react"; +import type { ErrorInfo, ReactNode } from "react"; import { Component } from "react"; +import { reportError } from 
"renderer/lib/report-error"; export interface BootErrorBoundaryProps { children: ReactNode; @@ -21,8 +22,15 @@ export class BootErrorBoundary extends Component< return { hasError: true, error }; } - componentDidCatch(error: Error): void { - console.error("[renderer] Boot error boundary caught:", error); + componentDidCatch(error: Error, errorInfo: ErrorInfo): void { + console.error("[renderer] Boot error boundary caught:", error, errorInfo); + // Forward to Sentry so React render errors (which bypass window.onerror) + // actually show up in the dashboard. + reportError(error, { + severity: "fatal", + tags: { subsystem: "boot-error-boundary" }, + context: { componentStack: errorInfo.componentStack }, + }); this.props.onError?.(error); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/ChatInputFooter.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/ChatInputFooter.tsx index 14ec71c7380..9d403a4dbdf 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/ChatInputFooter.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/ChatInputFooter.tsx @@ -39,6 +39,8 @@ interface ChatInputFooterProps { setPermissionMode: React.Dispatch>; thinkingLevel: ThinkingLevel; setThinkingLevel: (level: ThinkingLevel) => void; + thinkingDisabledLevels?: Partial>; + thinkingHint?: string; slashCommands: SlashCommand[]; submitDisabled?: boolean; renderAttachment?: (file: FileUIPart & { id: string }) => ReactNode; @@ -54,6 +56,7 @@ interface ChatInputFooterProps { isQuestionSubmitting?: boolean; onQuestionRespond?: (questionId: string, answer: string) => Promise; onQuestionCancel?: () => void; + onSlashCommandSend?: (command: SlashCommand) => void; } export function ChatInputFooter({ @@ -71,6 +74,8 @@ export function ChatInputFooter({ setPermissionMode, thinkingLevel, setThinkingLevel, + thinkingDisabledLevels, + thinkingHint, 
slashCommands, submitDisabled, renderAttachment, @@ -193,6 +198,8 @@ export function ChatInputFooter({ setPermissionMode={setPermissionMode} thinkingLevel={thinkingLevel} setThinkingLevel={setThinkingLevel} + thinkingDisabledLevels={thinkingDisabledLevels} + thinkingHint={thinkingHint} canAbort={canAbort} submitStatus={submitStatus} submitDisabled={submitDisabled} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/components/ChatComposerControls/ChatComposerControls.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/components/ChatComposerControls/ChatComposerControls.tsx index 7039c5cb8a9..f373ac17993 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/components/ChatComposerControls/ChatComposerControls.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ChatInputFooter/components/ChatComposerControls/ChatComposerControls.tsx @@ -26,6 +26,8 @@ interface ChatComposerControlsProps { setPermissionMode: React.Dispatch>; thinkingLevel: ThinkingLevel; setThinkingLevel: (level: ThinkingLevel) => void; + thinkingDisabledLevels?: Partial>; + thinkingHint?: string; canAbort: boolean; submitStatus?: ChatStatus; submitDisabled?: boolean; @@ -42,6 +44,8 @@ export function ChatComposerControls({ setPermissionMode, thinkingLevel, setThinkingLevel, + thinkingDisabledLevels, + thinkingHint, canAbort, submitStatus, submitDisabled, @@ -64,6 +68,8 @@ export function ChatComposerControls({ diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessageList/MessageList.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessageList/MessageList.tsx index 0859850c7a4..ab6b0470669 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessageList/MessageList.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessageList/MessageList.tsx @@ -7,7 
+7,6 @@ import { import { Message, MessageContent } from "@superset/ui/ai-elements/message"; import { ShimmerLabel } from "@superset/ui/ai-elements/shimmer-label"; import type { ChatStatus, UIMessage } from "ai"; -import { isToolUIPart } from "ai"; import { FileIcon, FileTextIcon, ImageIcon } from "lucide-react"; import { useCallback } from "react"; import { HiMiniChatBubbleLeftRight } from "react-icons/hi2"; @@ -20,16 +19,6 @@ import { normalizeWorkspaceFilePath } from "../../utils/file-paths"; import { MessagePartsRenderer } from "../MessagePartsRenderer"; import { MessageScrollbackRail } from "./components/MessageScrollbackRail"; -function hasRenderableParts(parts: UIMessage["parts"]): boolean { - return parts.some( - (p) => - p.type === "text" || - p.type === "reasoning" || - (p as { type: string }).type === "error" || // "error" part type exists at runtime but is not yet in the UIMessage union - isToolUIPart(p), - ); -} - interface MessageListProps { messages: UIMessage[]; interruptedMessage?: InterruptedMessagePreview | null; @@ -82,7 +71,11 @@ export function MessageList({ const handleImageClick = useCallback( (url: string) => { if (!workspaceId) return; - addFileViewerPane(workspaceId, { filePath: url, isPinned: true }); + addFileViewerPane(workspaceId, { + filePath: url, + isPinned: true, + useRightSidebarOpenViewWidth: true, + }); }, [workspaceId, addFileViewerPane], ); @@ -208,6 +201,7 @@ export function MessageList({ addFileViewerPane(workspaceId, { filePath: normalizedPath, isPinned: true, + useRightSidebarOpenViewWidth: true, }); }} /> @@ -221,14 +215,10 @@ export function MessageList({ ); } - const showThinking = - isLastAssistant && isThinking && msg.parts.length === 0; - if (!showThinking && !hasRenderableParts(msg.parts)) return null; - return ( - {showThinking ? ( + {isLastAssistant && isThinking && msg.parts.length === 0 ? ( Thinking... 
@@ -254,7 +244,6 @@ export function MessageList({ parts={interruptedMessage.parts} isLastAssistant={false} isStreaming={false} - isInterrupted workspaceId={workspaceId} workspaceCwd={workspaceCwd} onAnswer={onAnswer} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessagePartsRenderer/MessagePartsRenderer.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessagePartsRenderer/MessagePartsRenderer.tsx index d3388210636..bdbdaeb7591 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessagePartsRenderer/MessagePartsRenderer.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/MessagePartsRenderer/MessagePartsRenderer.tsx @@ -11,6 +11,7 @@ import { import type React from "react"; import { useCallback, useMemo } from "react"; import { electronTrpc } from "renderer/lib/electron-trpc"; +import { createShikiTheme } from "renderer/screens/main/components/WorkspaceView/utils/code-theme/shiki-theme"; import { useTheme } from "renderer/stores"; import { useTabsStore } from "renderer/stores/tabs/store"; import { READ_ONLY_TOOLS } from "../../constants"; @@ -29,7 +30,6 @@ interface MessagePartsRendererProps { parts: UIMessage["parts"]; isLastAssistant: boolean; isStreaming: boolean; - isInterrupted?: boolean; workspaceId?: string; workspaceCwd?: string; onAnswer?: ( @@ -42,7 +42,6 @@ export function MessagePartsRenderer({ parts, isLastAssistant, isStreaming, - isInterrupted, workspaceId, workspaceCwd, onAnswer, @@ -70,7 +69,10 @@ export function MessagePartsRenderer({ workspaceRoot: workspaceCwd, }); if (!normalizedPath) return; - addFileViewerPane(workspaceId, { filePath: normalizedPath }); + addFileViewerPane(workspaceId, { + filePath: normalizedPath, + useRightSidebarOpenViewWidth: true, + }); }, [addFileViewerPane, workspaceCwd, workspaceId], ); @@ -104,6 +106,14 @@ export function MessagePartsRenderer({ }), [theme?.type], ); + const shikiTheme = useMemo(() => { + if 
(!theme) return undefined; + const currentTheme = createShikiTheme(theme); + return [currentTheme, currentTheme] as [ + typeof currentTheme, + typeof currentTheme, + ]; + }, [theme]); const renderParts = ({ parts, @@ -126,6 +136,7 @@ export function MessagePartsRenderer({ isAnimating={isLastAssistant && isStreaming} mermaid={mermaidConfig} components={components} + shikiTheme={shikiTheme} />, ); i++; @@ -164,8 +175,6 @@ export function MessagePartsRenderer({ , ); @@ -194,8 +203,6 @@ export function MessagePartsRenderer({ , ); @@ -314,7 +321,6 @@ export function MessagePartsRenderer({ {displayText} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ReadOnlyToolCall/ReadOnlyToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ReadOnlyToolCall/ReadOnlyToolCall.tsx index 634d7894a3d..e7baca5a4ab 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ReadOnlyToolCall/ReadOnlyToolCall.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ReadOnlyToolCall/ReadOnlyToolCall.tsx @@ -1,21 +1,23 @@ -import { ClickableFilePath } from "@superset/ui/ai-elements/clickable-file-path"; -import { ReadFileTool } from "@superset/ui/ai-elements/read-file-tool"; +import { ShimmerLabel } from "@superset/ui/ai-elements/shimmer-label"; import { ToolInput, ToolOutput } from "@superset/ui/ai-elements/tool"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@superset/ui/collapsible"; import { getToolName } from "ai"; import { + CheckIcon, + ExternalLinkIcon, FileIcon, FileSearchIcon, FolderTreeIcon, + Loader2Icon, SearchIcon, + XIcon, } from "lucide-react"; -import { electronTrpc } from "renderer/lib/electron-trpc"; -import { detectLanguage } from "shared/detect-language"; -import type { BundledLanguage } from "shiki"; -import { - getWorkspaceToolFilePath, - normalizeWorkspaceFilePath, -} 
from "../../utils/file-paths"; +import { useState } from "react"; +import { getWorkspaceToolFilePath } from "../../utils/file-paths"; import type { ToolPart } from "../../utils/tool-helpers"; import { getArgs, @@ -32,19 +34,51 @@ function stringify(value: unknown): string { } } +function toRecord(value: unknown): Record | undefined { + if (typeof value === "object" && value !== null && !Array.isArray(value)) { + return value as Record; + } + return undefined; +} + +function toStringValue(value: unknown): string | undefined { + if (typeof value === "string") return value; + if (typeof value === "number" || typeof value === "boolean") { + return String(value); + } + return undefined; +} + +function extractReadFileContent(output: unknown): string | undefined { + const direct = toStringValue(output); + if (direct) return direct; + + const record = toRecord(output); + if (!record) return undefined; + + const nestedResult = toRecord(record.result); + + return ( + toStringValue(record.content) ?? + toStringValue(record.text) ?? + toStringValue(record.stdout) ?? + toStringValue(record.data) ?? + toStringValue(nestedResult?.content) ?? + toStringValue(nestedResult?.text) ?? + toStringValue(nestedResult?.stdout) + ); +} + interface ReadOnlyToolCallProps { part: ToolPart; - workspaceId?: string; - workspaceCwd?: string; onOpenFileInPane?: (filePath: string) => void; } export function ReadOnlyToolCall({ part, - workspaceId, - workspaceCwd, onOpenFileInPane, }: ReadOnlyToolCallProps) { + const [isOpen, setIsOpen] = useState(false); const args = getArgs(part); const toolName = normalizeToolName(getToolName(part)); const output = @@ -58,44 +92,11 @@ export function ReadOnlyToolCall({ part.state !== "output-available" && part.state !== "output-error"; const displayState = toToolDisplayState(part); const isReadFileTool = toolName === "mastra_workspace_read_file"; + const readFileContent = isReadFileTool + ? 
extractReadFileContent(output) + : undefined; const hasDetails = part.input != null || output != null || isError; - const rawFilePath = isReadFileTool - ? String(args.path ?? args.filePath ?? args.file_path ?? args.file ?? "") - : ""; - const absoluteFilePath = rawFilePath - ? normalizeWorkspaceFilePath({ - filePath: rawFilePath, - workspaceRoot: workspaceCwd, - }) - : null; - - const fileQuery = electronTrpc.filesystem.readFile.useQuery( - { - workspaceId: workspaceId ?? "", - absolutePath: absoluteFilePath ?? "", - encoding: "utf-8", - }, - { - enabled: - isReadFileTool && !isPending && !!absoluteFilePath && !!workspaceId, - retry: false, - refetchOnWindowFocus: false, - staleTime: Infinity, - }, - ); - - const fileContent = fileQuery.data?.content as string | undefined; - const hasFileContent = fileContent !== undefined; - - const lineRange = hasFileContent - ? (() => { - // The disk read always returns the whole file, so report 1–N - const lineCount = fileContent.trimEnd().split("\n").length; - return `1–${lineCount}`; - })() - : null; - let title = "Read file"; let subtitle = String(args.path ?? args.filePath ?? args.query ?? ""); let Icon = FileIcon; @@ -152,76 +153,77 @@ export function ReadOnlyToolCall({ const filePath = getWorkspaceToolFilePath({ toolName, args }); const canOpenFile = Boolean(filePath && onOpenFileInPane); - // Prevent a flash of raw output while the disk read is in flight - if ( - isReadFileTool && - !isError && - !isPending && - !hasFileContent && - fileQuery.isLoading - ) { - return ( - - ); - } - - if (isReadFileTool && !isError && hasFileContent) { - const displayPath = absoluteFilePath ?? rawFilePath; - const filename = displayPath.split("/").pop() ?? displayPath; - return ( - onOpenFileInPane?.(filePath) - : undefined - } - /> - ); - } - - // For file-path tools (e.g. file_stat), make the filename clickable. - // Search queries and directory listings stay as plain text. 
- const descriptionNode = - canOpenFile && filePath && subtitle ? ( - onOpenFileInPane?.(filePath)} - /> - ) : ( - subtitle || undefined - ); - return ( - hasDetails && setIsOpen(open)} + open={hasDetails ? isOpen : false} > - {hasDetails ? ( -
- {part.input != null && } - {(output != null || isError) && ( - +
+ + + + {canOpenFile && filePath && ( + + )} +
+ {hasDetails && ( + + {isReadFileTool && !isError && readFileContent ? ( +
+
+								{readFileContent}
+							
+
+ ) : ( +
+ {part.input != null && } + {(output != null || isError) && ( + + )} +
)} -
- ) : undefined} -
+ + )} + ); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/SlashCommandMenu/SlashCommandMenu.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/SlashCommandMenu/SlashCommandMenu.tsx index 2d0b97bfae3..152348fc160 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/SlashCommandMenu/SlashCommandMenu.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/SlashCommandMenu/SlashCommandMenu.tsx @@ -28,8 +28,8 @@ export function SlashCommandMenu({ e.preventDefault()} onCloseAutoFocus={(e) => e.preventDefault()} > diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/ToolCallBlock.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/ToolCallBlock.tsx index 1e3d3357a83..f45ad5e8b9f 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/ToolCallBlock.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/ToolCallBlock.tsx @@ -3,7 +3,7 @@ import { FileDiffTool } from "@superset/ui/ai-elements/file-diff-tool"; import { WebFetchTool } from "@superset/ui/ai-elements/web-fetch-tool"; import { WebSearchTool } from "@superset/ui/ai-elements/web-search-tool"; import { getToolName } from "ai"; -import { FileIcon, FolderIcon, GlobeIcon } from "lucide-react"; +import { FileIcon, FolderIcon } from "lucide-react"; import { useCallback, useMemo } from "react"; import { posthog } from "renderer/lib/posthog"; import { useChangesStore } from "renderer/stores/changes"; @@ -35,13 +35,10 @@ import { ListProjectsToolCall } from "./components/ListProjectsToolCall"; import { ListTaskStatusesToolCall } from "./components/ListTaskStatusesToolCall"; import { ListTasksToolCall } from "./components/ListTasksToolCall"; import { ListWorkspacesToolCall } from "./components/ListWorkspacesToolCall"; -import { LspInspectToolCall } from 
"./components/LspInspectToolCall"; -import { SkillToolCall } from "./components/SkillToolCall"; import { StartAgentSessionToolCall } from "./components/StartAgentSessionToolCall"; import { SubagentToolCall } from "./components/SubagentToolCall"; import { SupersetToolCall } from "./components/SupersetToolCall"; import { SwitchWorkspaceToolCall } from "./components/SwitchWorkspaceToolCall"; -import { TaskWriteToolCall } from "./components/TaskWriteToolCall"; import { UpdateTaskToolCall } from "./components/UpdateTaskToolCall"; import { UpdateWorkspaceToolCall } from "./components/UpdateWorkspaceToolCall"; import { getExecuteCommandViewModel } from "./utils/getExecuteCommandViewModel"; @@ -53,8 +50,6 @@ interface ToolCallBlockProps { workspaceCwd?: string; sessionId?: string | null; organizationId?: string | null; - isStreaming?: boolean; - isInterrupted?: boolean; onAnswer?: ( toolCallId: string, answers: Record, @@ -73,8 +68,6 @@ export function ToolCallBlock({ workspaceCwd, sessionId, organizationId, - isStreaming, - isInterrupted, onAnswer, }: ToolCallBlockProps) { const args = getArgs(part); @@ -105,7 +98,10 @@ export function ToolCallBlock({ if (!workspaceId) return; const normalizedPath = normalizeFilePath(filePath); if (!normalizedPath) return; - addFileViewerPane(workspaceId, { filePath: normalizedPath }); + addFileViewerPane(workspaceId, { + filePath: normalizedPath, + useRightSidebarOpenViewWidth: true, + }); posthog.capture("chat_file_opened_from_tool", { workspace_id: workspaceId, session_id: sessionId ?? 
null, @@ -171,6 +167,7 @@ export function ToolCallBlock({ commitHash: diffPaneTarget?.commitHash, oldPath: diffPaneTarget?.oldPath, viewMode: "diff", + useRightSidebarOpenViewWidth: true, }); posthog.capture("chat_file_opened_from_tool", { workspace_id: workspaceId, @@ -452,20 +449,10 @@ export function ToolCallBlock({ ); } - // --- Web search → WebSearchTool (with results) or GenericToolCall (without) --- - if (toolName === "web_search" || toolName.includes("web_search")) { + // --- Web search → WebSearchTool --- + if (toolName === "web_search") { const { query, results } = getWebSearchViewModel({ args, result }); - if (results.length > 0) { - return ; - } - return ( - - ); + return ; } // --- Web fetch → WebFetchTool --- @@ -500,8 +487,6 @@ export function ToolCallBlock({ result={result} outputObject={outputObject} nestedResultObject={nestedResultObject} - isStreaming={isStreaming} - isInterrupted={isInterrupted} onAnswer={onAnswer} /> ); @@ -587,14 +572,7 @@ export function ToolCallBlock({ // --- Read-only exploration tools --- if (READ_ONLY_TOOLS.has(toolName)) { - return ( - - ); + return ; } // --- Destructive workspace tools --- @@ -618,12 +596,8 @@ export function ToolCallBlock({ return ; } - if (toolName === "lsp_inspect") { - return ; - } - if (toolName === "task_write") { - return ; + return ; } if (toolName === "task_check") { @@ -635,26 +609,7 @@ export function ToolCallBlock({ } if (toolName === "subagent") { - return ( - - ); - } - - if (toolName === "skill" || toolName === "load_skill") { - const skillName = - typeof args.name === "string" - ? args.name - : typeof args.command === "string" - ? 
args.command - : toolDisplayName; - return ; + return ; } // --- Fallback: generic tool UI --- diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/AskUserQuestionToolCall/AskUserQuestionToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/AskUserQuestionToolCall/AskUserQuestionToolCall.tsx index 5fb7b93c428..e136bd70331 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/AskUserQuestionToolCall/AskUserQuestionToolCall.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/AskUserQuestionToolCall/AskUserQuestionToolCall.tsx @@ -1,13 +1,9 @@ -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { - CheckIcon, - CircleXIcon, - ClockIcon, - MessageCircleQuestionIcon, - XIcon, -} from "lucide-react"; -import { useMemo } from "react"; +import { MessageResponse } from "@superset/ui/ai-elements/message"; +import { UserQuestionTool } from "@superset/ui/ai-elements/user-question-tool"; +import { MessageCircleQuestionIcon } from "lucide-react"; +import { useEffect, useMemo, useState } from "react"; import type { ToolPart } from "../../../../utils/tool-helpers"; +import { SupersetToolCall } from "../SupersetToolCall"; interface QuestionToolOption { label: string; @@ -27,8 +23,6 @@ interface AskUserQuestionToolCallProps { result: Record; outputObject?: Record; nestedResultObject?: Record; - isStreaming?: boolean; - isInterrupted?: boolean; onAnswer?: ( toolCallId: string, answers: Record, @@ -67,6 +61,7 @@ function toQuestionToolQuestions(value: unknown): QuestionToolQuestion[] { typeof optionRecord.description === "string" ? optionRecord.description.trim() : ""; + return description ? 
{ label, description } : { label }; }) .filter((option): option is QuestionToolOption => option !== null) @@ -124,49 +119,37 @@ function findAnswerForQuestion({ return undefined; } -type QuestionStatus = "awaiting" | "answered" | "cancelled"; - -const QUESTION_STATUS_CONFIG: Record< - QuestionStatus, - { label: string; icon: typeof ClockIcon } -> = { - awaiting: { label: "Awaiting Response", icon: ClockIcon }, - answered: { label: "Answered", icon: CheckIcon }, - cancelled: { label: "Cancelled", icon: XIcon }, -}; - -function QuestionStatusDescription({ status }: { status: QuestionStatus }) { - const { label, icon: Icon } = QUESTION_STATUS_CONFIG[status]; - return ( - - - {label} - - ); -} +function buildQuestionMarkdown({ + questions, + answers, +}: { + questions: QuestionToolQuestion[]; + answers: Record; +}): string { + if (questions.length === 0) return ""; + + const lines: string[] = ["### Agent question"]; + for (const [index, question] of questions.entries()) { + lines.push(""); + if (questions.length > 1) { + lines.push(`#### ${index + 1}`); + } + if (question.header) { + lines.push(`_${question.header}_`); + } + lines.push(question.question); + + const answer = findAnswerForQuestion({ + answers, + questionText: question.question, + }); + if (answer) { + lines.push(""); + lines.push(`**Answer:** ${answer}`); + } + } -function toSingleQuestion( - args: Record, -): QuestionToolQuestion[] { - const question = - typeof args.question === "string" ? args.question.trim() : ""; - if (!question) return []; - - const options = Array.isArray(args.options) - ? args.options - .map((opt): QuestionToolOption | null => { - if (typeof opt !== "object" || opt === null) return null; - const o = opt as Record; - const label = typeof o.label === "string" ? o.label.trim() : ""; - if (!label) return null; - const description = - typeof o.description === "string" ? o.description.trim() : ""; - return description ? 
{ label, description } : { label }; - }) - .filter((o): o is QuestionToolOption => o !== null) - : []; - - return [{ question, options }]; + return lines.join("\n").trim(); } export function AskUserQuestionToolCall({ @@ -175,126 +158,112 @@ export function AskUserQuestionToolCall({ result, outputObject, nestedResultObject, - isInterrupted, + onAnswer, }: AskUserQuestionToolCallProps) { - const questions = useMemo( - () => - Array.isArray(args.questions) - ? toQuestionToolQuestions(args.questions) - : toSingleQuestion(args), - [args], - ); - - const answers = useMemo( - () => - toQuestionToolAnswers( - toRecord(result.answers) ?? - toRecord(outputObject?.answers) ?? - toRecord(nestedResultObject?.answers), - ), - [nestedResultObject?.answers, outputObject?.answers, result.answers], - ); - - // Mastracode sends { isError: true, content: "..." } for aborted questions - const isResultError = result.isError === true; - - // Fallback for plain-string results and mastracode's { content: "User answered: " } format - const answerFallbackText = useMemo(() => { - // Error results are not answers - if (isResultError) return undefined; - if (typeof result.text === "string" && result.text.trim()) - return result.text.trim(); - if (typeof result.answer === "string" && result.answer.trim()) - return result.answer.trim(); - // ask_user tool returns { content: "User answered: ", isError: false } - if (typeof result.content === "string" && result.content.trim()) { - const raw = result.content.trim(); - const prefix = "User answered: "; - return raw.startsWith(prefix) ? 
raw.slice(prefix.length).trim() : raw; + const [optimisticAnswers, setOptimisticAnswers] = useState | null>(null); + const [isSubmittingLocally, setIsSubmittingLocally] = useState(false); + + useEffect(() => { + if (part.state === "output-available" || part.state === "output-error") { + setIsSubmittingLocally(false); } - return undefined; - }, [isResultError, result.text, result.answer, result.content]); - - const isCancelledByStop = - !!isInterrupted && - part.state !== "output-available" && - part.state !== "output-error"; - const isPending = - !isCancelledByStop && - part.state !== "output-available" && - part.state !== "output-error"; - const isCancelledByError = part.state === "output-error" || isResultError; - const hasAnswers = - Object.keys(answers).length > 0 || answerFallbackText !== undefined; - - const answeredQAs = useMemo( - () => - questions - .map((q) => ({ - question: q.question, - answer: findAnswerForQuestion({ answers, questionText: q.question }), - })) - .filter( - (qa): qa is { question: string; answer: string } => - qa.answer !== undefined, - ), - [questions, answers], - ); - - // No args available (tool_result-only path with input: {}) — nothing useful to show - if (questions.length === 0 && !isCancelledByError && !isCancelledByStop) - return null; - - const isAnswered = - !isPending && !isCancelledByError && !isCancelledByStop && hasAnswers; - const isCancelled = - !isPending && !isCancelledByError && !isCancelledByStop && !hasAnswers; + }, [part.state]); + + const questions = useMemo(() => { + return toQuestionToolQuestions(args.questions); + }, [args.questions]); + + const serverAnswers = useMemo(() => { + return toQuestionToolAnswers( + toRecord(result.answers) ?? + toRecord(outputObject?.answers) ?? + toRecord(nestedResultObject?.answers), + ); + }, [nestedResultObject?.answers, outputObject?.answers, result.answers]); + + const answers = optimisticAnswers ?? 
serverAnswers; + const markdown = buildQuestionMarkdown({ questions, answers }); + const hasOutput = + part.state === "output-available" || part.state === "output-error"; + const hasQuestions = questions.length > 0; + const canRespond = Boolean(onAnswer) && !hasOutput && !isSubmittingLocally; + + const messageBlock = markdown ? ( +
+ + {markdown} + +
+ ) : null; + + const handleSubmit = (submittedAnswers: Record): void => { + if (!onAnswer || isSubmittingLocally) return; + setOptimisticAnswers(submittedAnswers); + setIsSubmittingLocally(true); + + void Promise.resolve(onAnswer(part.toolCallId, submittedAnswers)).catch( + () => { + setOptimisticAnswers(null); + setIsSubmittingLocally(false); + }, + ); + }; + + if (!hasQuestions) { + return ( + + ); + } - // Fallback for plain-string result when questions array has one entry - const fallbackQA = - answeredQAs.length === 0 && answerFallbackText && questions[0] - ? { question: questions[0].question, answer: answerFallbackText } - : null; + if (hasOutput || !onAnswer || optimisticAnswers) { + return ( +
+ {messageBlock} + +
+ ); + } - const qasToShow = - answeredQAs.length > 0 ? answeredQAs : fallbackQA ? [fallbackQA] : []; + if (!canRespond) { + return ( +
+ {messageBlock} + +
+ ); + } return ( - - ) : isAnswered ? ( - - ) : isCancelled || isCancelledByError || isCancelledByStop ? ( - - ) : undefined - } - > - {isAnswered && qasToShow.length > 0 - ? qasToShow.map((qa) => ( -
-
{qa.question}
-
{qa.answer}
-
- )) - : (isCancelledByError || isCancelledByStop) && questions.length > 0 - ? questions.map((q) => ( -
-
- {q.question} -
-
- - Aborted by the user -
-
- )) - : undefined} -
+
+ {messageBlock} + handleSubmit({})} + /> +
); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/GenericToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/GenericToolCall.tsx index 25d4fdd1c4a..037f9e76077 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/GenericToolCall.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/GenericToolCall.tsx @@ -1,68 +1,83 @@ +import { ShimmerLabel } from "@superset/ui/ai-elements/shimmer-label"; import { ToolInput, ToolOutput } from "@superset/ui/ai-elements/tool"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { WrenchIcon } from "lucide-react"; +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@superset/ui/collapsible"; +import { CheckIcon, Loader2Icon, WrenchIcon, XIcon } from "lucide-react"; import type { ComponentType } from "react"; +import { useState } from "react"; import type { ToolPart } from "../../../../utils/tool-helpers"; import { getGenericToolCallState } from "./getGenericToolCallState"; type GenericToolCallProps = { part: ToolPart; toolName: string; - subtitle?: string; icon?: ComponentType<{ className?: string }>; }; -function getQueryFromInput(input: unknown): string | undefined { - if (input != null && typeof input === "object" && !Array.isArray(input)) { - const query = (input as Record).query; - if (typeof query === "string" && query.trim().length > 0) return query; - } - return undefined; -} - export function GenericToolCall({ part, toolName, - subtitle, icon: Icon = WrenchIcon, }: GenericToolCallProps) { - const { output, isError, isNotConfigured, displayState, errorText } = + const [isOpen, setIsOpen] = useState(false); + const { output, isError, displayState, errorText } = getGenericToolCallState(part); const 
isPending = part.state !== "output-available" && part.state !== "output-error"; const hasDetails = part.input != null || output != null || isError; - const query = getQueryFromInput(part.input); return ( - hasDetails && setIsOpen(open)} + open={hasDetails ? isOpen : false} > - {hasDetails ? ( -
- {query != null ? ( -
-

- Query -

-

{query}

-
- ) : ( - part.input != null && - )} - {(output != null || isError) && ( - - )} -
- ) : undefined} -
+ + + + {hasDetails && ( + +
+ {part.input != null && } + {(output != null || isError) && ( + + )} +
+
+ )} + ); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/getGenericToolCallState.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/getGenericToolCallState.ts index b299fbc8cba..8ae55c160d8 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/getGenericToolCallState.ts +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/GenericToolCall/getGenericToolCallState.ts @@ -5,7 +5,6 @@ import { toToolDisplayState } from "../../../../utils/tool-helpers"; export type GenericToolCallState = { output: unknown; isError: boolean; - isNotConfigured: boolean; displayState: ToolDisplayState; errorText?: string; }; @@ -49,15 +48,9 @@ export function getGenericToolCallState(part: ToolPart): GenericToolCallState { } } - const isNotConfigured = - isError && - typeof errorText === "string" && - errorText.toLowerCase().includes("not configured"); - return { output, isError, - isNotConfigured, displayState, errorText, }; diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/LspInspectToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/LspInspectToolCall.tsx deleted file mode 100644 index 07bdbb60c43..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/LspInspectToolCall.tsx +++ /dev/null @@ -1,50 +0,0 @@ -import { ToolInput, ToolOutput } from "@superset/ui/ai-elements/tool"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { SearchCheckIcon } from "lucide-react"; -import type { ToolPart } from "../../../../utils/tool-helpers"; -import { getArgs } from "../../../../utils/tool-helpers"; -import { 
getGenericToolCallState } from "../GenericToolCall/getGenericToolCallState"; - -interface LspInspectToolCallProps { - part: ToolPart; -} - -export function LspInspectToolCall({ part }: LspInspectToolCallProps) { - const args = getArgs(part); - const { output, isError, isNotConfigured, errorText } = - getGenericToolCallState(part); - const isPending = - part.state !== "output-available" && part.state !== "output-error"; - - const rawPath = String( - args.file_path ?? args.filePath ?? args.path ?? args.file ?? "", - ); - const fileName = rawPath.includes("/") - ? rawPath.split("/").pop() - : rawPath || undefined; - - const hasDetails = part.input != null || output != null || isError; - - return ( - - {hasDetails ? ( -
- {part.input != null && } - {(output != null || isError) && ( - - )} -
- ) : undefined} -
- ); -} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/index.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/index.ts deleted file mode 100644 index a8c95496e1c..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/LspInspectToolCall/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { LspInspectToolCall } from "./LspInspectToolCall"; diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/SkillToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/SkillToolCall.tsx deleted file mode 100644 index 96aad45a47b..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/SkillToolCall.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { ZapIcon } from "lucide-react"; -import type { ToolPart } from "../../../../utils/tool-helpers"; - -type SkillToolCallProps = { - part: ToolPart; - skillName: string; -}; - -export function SkillToolCall({ part, skillName }: SkillToolCallProps) { - const isError = part.state === "output-error"; - const isPending = - part.state !== "output-available" && part.state !== "output-error"; - - return ( - - {!isPending ? ( -
- {isError ? ( -

Failed to load skill

- ) : ( -

- Successfully loaded skill -

- )} -
- ) : undefined} -
- ); -} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/index.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/index.ts deleted file mode 100644 index b338dfa9f15..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SkillToolCall/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { SkillToolCall } from "./SkillToolCall"; diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/SubagentToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/SubagentToolCall.tsx index d593b96bb2f..6470cf5d12c 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/SubagentToolCall.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/SubagentToolCall.tsx @@ -1,11 +1,13 @@ +import { ShimmerLabel } from "@superset/ui/ai-elements/shimmer-label"; import { - MessageResponse, - TOOL_CALL_MD_CLASSNAME, -} from "@superset/ui/ai-elements/message"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { BotIcon } from "lucide-react"; -import { useMemo } from "react"; -import { SubagentInnerToolCall } from "renderer/components/Chat/components/SubagentInnerToolCall"; + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@superset/ui/collapsible"; +import { cn } from "@superset/ui/lib/utils"; +import { BotIcon, CheckIcon, Loader2Icon, XIcon } from "lucide-react"; +import { useId, useMemo, useState } from "react"; +import { MarkdownToggleContent } from "renderer/components/Chat/components/MarkdownToggleContent"; import type { ToolPart } from "../../../../utils/tool-helpers"; import { parseSubagentToolResult } from 
"./utils/parseSubagentToolResult"; @@ -13,9 +15,6 @@ interface SubagentToolCallProps { part: ToolPart; args: Record; result: Record; - workspaceId?: string; - workspaceCwd?: string; - onOpenFileInPane?: (filePath: string) => void; } function asString(value: unknown): string | null { @@ -28,10 +27,10 @@ export function SubagentToolCall({ part, args, result, - workspaceId, - workspaceCwd, - onOpenFileInPane, }: SubagentToolCallProps) { + const [isOpen, setIsOpen] = useState(false); + const [renderMarkdown, setRenderMarkdown] = useState(true); + const markdownToggleId = useId(); const isPending = part.state !== "output-available" && part.state !== "output-error"; const isError = @@ -43,61 +42,89 @@ export function SubagentToolCall({ const parsed = useMemo(() => parseSubagentToolResult(result), [result]); const hasDetails = - task.length > 0 || parsed.text.length > 0 || parsed.tools.length > 0; - - // Title: "Agent" (foreground) — agentType goes in description (muted) - const titleNode = ( - - Agent{" "} - {agentType} - - ); + task.length > 0 || + parsed.text.length > 0 || + parsed.tools.length > 0 || + Boolean(parsed.modelId) || + parsed.durationMs !== undefined; return ( - hasDetails && setIsOpen(open)} + open={hasDetails ? isOpen : false} > - {hasDetails ? ( -
- - {task} - - {parsed.tools.length > 0 ? ( -
- {parsed.tools.map((tool, index) => ( - - ))} -
- ) : null} - {parsed.text ? ( - +
+
+ {isPending ? ( + + ) : isError ? ( + + ) : ( + + )} +
+ + + {hasDetails && ( + +
+
{task}
+
+ {agentType} + {parsed.modelId ? ` • ${parsed.modelId}` : ""} + {parsed.durationMs !== undefined + ? ` • ${Math.round(parsed.durationMs)} ms` + : ""} +
+ {parsed.tools.length > 0 ? ( +
+ {parsed.tools.map((tool, index) => ( + + {tool.name} + + ))} +
+ ) : null} + {parsed.text ? ( + + ) : null} +
+
+ )} + ); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/utils/parseSubagentToolResult/parseSubagentToolResult.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/utils/parseSubagentToolResult/parseSubagentToolResult.ts index eb6e9d98fa5..8d829331b38 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/utils/parseSubagentToolResult/parseSubagentToolResult.ts +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SubagentToolCall/utils/parseSubagentToolResult/parseSubagentToolResult.ts @@ -1,8 +1,6 @@ -export interface SubagentToolExecution { +interface SubagentToolExecution { name: string; isError: boolean; - args: Record | null; - result: string | null; } export interface SubagentToolResultSummary { @@ -28,45 +26,7 @@ function firstString(...values: unknown[]): string | null { return null; } -function parseDetailedToolCalls( - content: string, -): { tools: SubagentToolExecution[]; stripped: string } | null { - const match = content.match( - /\n([\s\S]*?)<\/subagent-tool-calls>/, - ); - if (!match) return null; - try { - const parsed = JSON.parse(match[1]); - if (!Array.isArray(parsed)) return null; - const tools = parsed - .filter( - (item): item is Record => - typeof item === "object" && item !== null, - ) - .map((item) => ({ - name: typeof item.name === "string" ? item.name : "tool", - isError: item.isError === true, - args: - typeof item.args === "object" && item.args !== null - ? (item.args as Record) - : null, - result: - typeof item.result === "string" - ? item.result - : item.result !== null && item.result !== undefined - ? String(item.result) - : null, - })); - const stripped = - content.slice(0, match.index) + - content.slice((match.index ?? 
0) + match[0].length); - return { tools, stripped }; - } catch { - return null; - } -} - -function parseLegacyTools(value: string | undefined): SubagentToolExecution[] { +function parseTools(value: string | undefined): SubagentToolExecution[] { if (!value) return []; return value .split(",") @@ -78,9 +38,7 @@ function parseLegacyTools(value: string | undefined): SubagentToolExecution[] { const status = statusPart?.trim().toLowerCase() || "ok"; return { name, - isError: status === "error" || status === "failed" || status === "err", - args: null, - result: null, + isError: status === "error" || status === "failed", }; }); } @@ -91,17 +49,12 @@ export function parseSubagentToolResult( const record = asRecord(value); const textContent = firstString(record?.content, record?.result, record?.text) ?? ""; - - // Try to parse the detailed tool-calls block first - const detailed = parseDetailedToolCalls(textContent); - const workingContent = detailed ? detailed.stripped : textContent; - - const metaTagRegex = /\n?]+?)\s*\/>/i; - const match = workingContent.match(metaTagRegex); + const metaTagRegex = /]+?)\s*\/>/i; + const match = textContent.match(metaTagRegex); if (!match) { return { - text: workingContent.trim(), - tools: detailed?.tools ?? [], + text: textContent, + tools: [], }; } @@ -114,10 +67,10 @@ export function parseSubagentToolResult( const durationRaw = attrs.get("durationMs"); const durationMs = durationRaw ? Number(durationRaw) : Number.NaN; return { - text: workingContent.replace(metaTagRegex, "").trim(), + text: textContent.replace(metaTagRegex, "").trim(), modelId: attrs.get("modelId"), durationMs: Number.isFinite(durationMs) && durationMs >= 0 ? durationMs : undefined, - tools: detailed?.tools ?? 
parseLegacyTools(attrs.get("tools")), + tools: parseTools(attrs.get("tools")), }; } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SupersetToolCall/SupersetToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SupersetToolCall/SupersetToolCall.tsx index c0788a38a6e..cd5e20ecc03 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SupersetToolCall/SupersetToolCall.tsx +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/SupersetToolCall/SupersetToolCall.tsx @@ -1,11 +1,12 @@ +import { ShimmerLabel } from "@superset/ui/ai-elements/shimmer-label"; import { - MessageResponse, - TOOL_CALL_MD_CLASSNAME, -} from "@superset/ui/ai-elements/message"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { WrenchIcon } from "lucide-react"; + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "@superset/ui/collapsible"; +import { CheckIcon, Loader2Icon, WrenchIcon, XIcon } from "lucide-react"; import type { ComponentType, ReactNode } from "react"; -import { useMemo } from "react"; +import { useMemo, useState } from "react"; import type { ToolPart } from "../../../../utils/tool-helpers"; type SupersetToolCallProps = { @@ -13,7 +14,6 @@ type SupersetToolCallProps = { toolName: string; icon?: ComponentType<{ className?: string }>; details?: ReactNode; - subtitle?: string; }; function stringifyValue(value: unknown): string { @@ -30,8 +30,8 @@ export function SupersetToolCall({ toolName, icon: Icon = WrenchIcon, details, - subtitle, }: SupersetToolCallProps) { + const [isOpen, setIsOpen] = useState(false); const output = "output" in part ? 
(part as { output?: unknown }).output : undefined; const outputObject = @@ -52,48 +52,60 @@ export function SupersetToolCall({ return "Tool failed"; }, [isError, output, outputError, outputObject?.message]); - const contentText = (() => { - if (isPending || isError) return null; - if (typeof output === "string" && output.trim()) return output.trim(); - if (outputObject) { - const c = outputObject.content ?? outputObject.text; - if (typeof c === "string" && c.trim()) return c.trim(); - } - return null; - })(); - - const hasDetails = Boolean(details) || isError || contentText != null; + const hasDetails = Boolean(details) || isError; return ( - hasDetails && setIsOpen(open)} + open={hasDetails ? isOpen : false} > - {hasDetails ? ( -
- {details ? ( -
- {details} -
- ) : null} - {isError && errorText ? ( -
- {errorText} -
- ) : contentText != null ? ( - +
+
+ {isPending ? ( + + ) : isError ? ( + + ) : ( + + )} +
+ + + {hasDetails ? ( + +
+ {details ? ( +
+ {details} +
+ ) : null} + {isError && errorText ? ( +
+ {errorText} +
+ ) : null} +
+
+ ) : null} + ); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/TaskWriteToolCall.tsx b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/TaskWriteToolCall.tsx deleted file mode 100644 index 9d4370eed53..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/TaskWriteToolCall.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import { ListTodoIcon } from "lucide-react"; -import type { ToolPart } from "../../../../utils/tool-helpers"; -import { getArgs } from "../../../../utils/tool-helpers"; -import { SupersetToolCall } from "../SupersetToolCall"; - -interface TodoItem { - id: string; - content: string; - status: "pending" | "in_progress" | "completed"; - priority?: string; -} - -function toTodoItems(value: unknown): TodoItem[] { - if (!Array.isArray(value)) return []; - return value.filter( - (item): item is TodoItem => - typeof item === "object" && - item !== null && - typeof (item as TodoItem).content === "string", - ); -} - -function buildDescription(todos: TodoItem[]): string | undefined { - if (todos.length === 0) return undefined; - - const inProgress = todos.filter((t) => t.status === "in_progress").length; - const completed = todos.filter((t) => t.status === "completed").length; - const pending = todos.filter((t) => t.status === "pending").length; - - const parts: string[] = [ - `${todos.length} task${todos.length === 1 ? 
"" : "s"}`, - ]; - const statusParts: string[] = []; - if (inProgress > 0) statusParts.push(`${inProgress} in progress`); - if (completed > 0) statusParts.push(`${completed} completed`); - if (pending > 0) statusParts.push(`${pending} pending`); - if (statusParts.length > 0) parts.push(statusParts.join(" · ")); - - return parts.join(" · "); -} - -interface TaskWriteToolCallProps { - part: ToolPart; -} - -export function TaskWriteToolCall({ part }: TaskWriteToolCallProps) { - const args = getArgs(part); - const todos = toTodoItems(args.todos); - const description = buildDescription(todos); - - return ( - - ); -} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/index.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/index.ts deleted file mode 100644 index fcf501bd5a4..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/components/ToolCallBlock/components/TaskWriteToolCall/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { TaskWriteToolCall } from "./TaskWriteToolCall"; diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/hooks/useFocusPromptOnPane.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/hooks/useFocusPromptOnPane.ts index f5444622569..bac0383f03d 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/hooks/useFocusPromptOnPane.ts +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/hooks/useFocusPromptOnPane.ts @@ -3,11 +3,11 @@ import { useEffect } from "react"; export function useFocusPromptOnPane(isFocused: boolean) { const { textInput } = usePromptInputController(); - const { focus } = textInput; + const focusPrompt = textInput.focus; useEffect(() => { if (isFocused) { - focus(); + focusPrompt(); } - }, [isFocused, focus]); + }, [focusPrompt, isFocused]); } diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/messageHelpers.ts 
b/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/messageHelpers.ts deleted file mode 100644 index 7fc77f41791..00000000000 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/messageHelpers.ts +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Returns true if an assistant message contains an ask_user_question/ask_user - * tool call that has already been answered (i.e. has a matching tool_result). - * - * Works with any message type that carries a `content: unknown[]` array — both - * the historical HistoryMessage and the display-layer ChatMessage. - */ -/** - * Returns true if an assistant message contains an ask_user_question/ask_user - * tool call that has NOT yet been answered (i.e. no matching tool_result). - */ -export function hasPendingQuestionToolCall(message: { - content: unknown[]; -}): boolean { - const questionCallIds = new Set(); - const resultIds = new Set(); - for (const part of message.content) { - const p = part as Record; - if (p.type === "tool_call") { - const name = typeof p.name === "string" ? p.name : ""; - if (name === "ask_user_question" || name === "ask_user") { - const id = typeof p.id === "string" ? p.id : ""; - if (id) questionCallIds.add(id); - } - } - if (p.type === "tool_result") { - const id = typeof p.id === "string" ? p.id : ""; - if (id) resultIds.add(id); - } - } - return ( - questionCallIds.size > 0 && - [...questionCallIds].every((id) => !resultIds.has(id)) - ); -} - -export function hasAnsweredQuestionToolCall(message: { - content: unknown[]; -}): boolean { - const questionCallIds = new Set(); - const resultIds = new Set(); - for (const part of message.content) { - const p = part as Record; - if (p.type === "tool_call") { - const name = typeof p.name === "string" ? p.name : ""; - if (name === "ask_user_question" || name === "ask_user") { - const id = typeof p.id === "string" ? p.id : ""; - if (id) questionCallIds.add(id); - } - } - if (p.type === "tool_result") { - const id = typeof p.id === "string" ? 
p.id : ""; - if (id) resultIds.add(id); - } - } - return ( - questionCallIds.size > 0 && - [...questionCallIds].some((id) => resultIds.has(id)) - ); -} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/thinking-levels.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/thinking-levels.ts new file mode 100644 index 00000000000..b53217ba244 --- /dev/null +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/thinking-levels.ts @@ -0,0 +1,48 @@ +import type { ThinkingLevel } from "@superset/ui/ai-elements/thinking-toggle"; + +const FORCED_LOW_THINKING_MODEL_PREFIXES = ["openai/gpt-5"] as const; + +export function requiresMinimumThinkingLevel(modelId?: string | null): boolean { + if (!modelId) return false; + return FORCED_LOW_THINKING_MODEL_PREFIXES.some((prefix) => + modelId.startsWith(prefix), + ); +} + +export function getEffectiveThinkingLevel( + thinkingLevel: ThinkingLevel, + modelId?: string | null, +): ThinkingLevel { + if (thinkingLevel === "off" && requiresMinimumThinkingLevel(modelId)) { + return "low"; + } + return thinkingLevel; +} + +export function getThinkingIndicatorLabel( + thinkingLevel: ThinkingLevel, +): string { + return thinkingLevel === "off" ? "Working..." 
: "Thinking..."; +} + +export function getForcedThinkingDisabledLevels( + modelId?: string | null, +): Partial> { + if (!requiresMinimumThinkingLevel(modelId)) { + return {}; + } + + return { + off: "GPT-5 models require at least Low reasoning", + }; +} + +export function getForcedThinkingHint( + modelId?: string | null, +): string | undefined { + if (!requiresMinimumThinkingLevel(modelId)) { + return undefined; + } + + return "GPT-5 models require at least Low reasoning."; +} diff --git a/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/tool-helpers.ts b/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/tool-helpers.ts index 2248a598674..b08aade3641 100644 --- a/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/tool-helpers.ts +++ b/apps/desktop/src/renderer/components/Chat/ChatInterface/utils/tool-helpers.ts @@ -33,8 +33,6 @@ const TOOL_NAME_ALIASES: Record = { task_write: "task_write", task_check: "task_check", submit_plan: "submit_plan", - lsp_inspect: "lsp_inspect", - mastra_workspace_lsp_inspect: "lsp_inspect", // Legacy Superset MCP names create_worktree: "create_workspace", diff --git a/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/MarkdownToggleContent.tsx b/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/MarkdownToggleContent.tsx new file mode 100644 index 00000000000..4411123790f --- /dev/null +++ b/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/MarkdownToggleContent.tsx @@ -0,0 +1,52 @@ +import { MessageResponse } from "@superset/ui/ai-elements/message"; +import { Switch } from "@superset/ui/switch"; + +interface MarkdownToggleContentProps { + toggleId: string; + checked: boolean; + onCheckedChange: (checked: boolean) => void; + content: string; + labelClassName?: string; + markdownContainerClassName?: string; + plainContainerClassName?: string; +} + +export function MarkdownToggleContent({ + toggleId, + checked, + onCheckedChange, + 
content, + labelClassName = "flex cursor-pointer items-center gap-2 text-muted-foreground", + markdownContainerClassName = "max-h-64 overflow-auto rounded border bg-background/80 p-2", + plainContainerClassName = "max-h-64 overflow-auto rounded border bg-background/80 p-2 text-xs whitespace-pre-wrap break-words", +}: MarkdownToggleContentProps) { + return ( + <> + + {checked ? ( +
+ + {content} + +
+ ) : ( +
{content}
+ )} + + ); +} diff --git a/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/index.ts b/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/index.ts new file mode 100644 index 00000000000..fb38d303ed2 --- /dev/null +++ b/apps/desktop/src/renderer/components/Chat/components/MarkdownToggleContent/index.ts @@ -0,0 +1 @@ +export { MarkdownToggleContent } from "./MarkdownToggleContent"; diff --git a/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/SubagentInnerToolCall.tsx b/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/SubagentInnerToolCall.tsx deleted file mode 100644 index 7b3a93c847f..00000000000 --- a/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/SubagentInnerToolCall.tsx +++ /dev/null @@ -1,322 +0,0 @@ -import { BashTool } from "@superset/ui/ai-elements/bash-tool"; -import { ClickableFilePath } from "@superset/ui/ai-elements/clickable-file-path"; -import { ReadFileTool } from "@superset/ui/ai-elements/read-file-tool"; -import { ToolCallRow } from "@superset/ui/ai-elements/tool-call-row"; -import { - CodeIcon, - FileIcon, - FileSearchIcon, - FileTextIcon, - FolderIcon, - GlobeIcon, - SearchIcon, - TerminalIcon, - WrenchIcon, -} from "lucide-react"; -import { type ComponentType, useMemo } from "react"; -import { getExecuteCommandViewModel } from "renderer/components/Chat/ChatInterface/components/ToolCallBlock/utils/getExecuteCommandViewModel"; -import { normalizeWorkspaceFilePath } from "renderer/components/Chat/ChatInterface/utils/file-paths"; -import { normalizeToolName } from "renderer/components/Chat/ChatInterface/utils/tool-helpers"; -import { detectLanguage } from "shared/detect-language"; -import type { BundledLanguage } from "shiki"; - -interface SubagentInnerToolCallProps { - name: string; - isError: boolean; - isPending?: boolean; - args: Record | null; - result: string | null; - workspaceId?: string; - workspaceCwd?: string; 
- onOpenFileInPane?: (filePath: string) => void; -} - -interface ToolMeta { - label: string; - icon: ComponentType<{ className?: string }>; -} - -const TOOL_META: Record = { - mastra_workspace_execute_command: { - label: "Bash", - icon: TerminalIcon, - }, - mastra_workspace_write_file: { label: "Write", icon: FileIcon }, - mastra_workspace_edit_file: { label: "Edit", icon: FileTextIcon }, - mastra_workspace_read_file: { label: "Read", icon: FileIcon }, - mastra_workspace_list_files: { label: "List Files", icon: FolderIcon }, - mastra_workspace_file_stat: { label: "Check file", icon: FileSearchIcon }, - mastra_workspace_search: { label: "Search", icon: SearchIcon }, - mastra_workspace_mkdir: { label: "Create Directory", icon: FolderIcon }, - mastra_workspace_delete: { label: "Delete", icon: FileIcon }, - ast_smart_edit: { label: "Smart Edit", icon: CodeIcon }, - web_fetch: { label: "Web Fetch", icon: GlobeIcon }, - web_search: { label: "Web Search", icon: GlobeIcon }, -}; - -function getToolMeta(toolName: string): ToolMeta { - return ( - TOOL_META[toolName] ?? { - label: toolName.replaceAll("_", " "), - icon: WrenchIcon, - } - ); -} - -/** Tools where the description is a file path (not a search query or URL). */ -const FILE_PATH_TOOLS = new Set([ - "mastra_workspace_write_file", - "mastra_workspace_edit_file", - "mastra_workspace_file_stat", - "mastra_workspace_delete", - "ast_smart_edit", -]); - -function getRawFilePath( - toolName: string, - args: Record, -): string | null { - if (FILE_PATH_TOOLS.has(toolName)) { - const raw = String( - args.path ?? args.filePath ?? args.file_path ?? args.file ?? 
"", - ); - return raw || null; - } - return null; -} - -function getDescription( - toolName: string, - args: Record | null, -): string | undefined { - if (!args) return undefined; - - let raw: string | undefined; - - switch (toolName) { - case "mastra_workspace_read_file": - case "mastra_workspace_write_file": - case "mastra_workspace_edit_file": - case "mastra_workspace_file_stat": - case "mastra_workspace_delete": - case "ast_smart_edit": - raw = - String( - args.path ?? args.filePath ?? args.file_path ?? args.file ?? "", - ) || undefined; - break; - case "mastra_workspace_list_files": - case "mastra_workspace_mkdir": - raw = - String( - args.path ?? - args.directory ?? - args.directoryPath ?? - args.directory_path ?? - args.root ?? - args.cwd ?? - "", - ) || undefined; - break; - case "mastra_workspace_search": - raw = - String( - args.query ?? - args.pattern ?? - args.regex ?? - args.substring_pattern ?? - args.text ?? - "", - ) || undefined; - break; - case "web_fetch": - raw = String(args.url ?? args.uri ?? "") || undefined; - break; - case "web_search": - raw = String(args.query ?? args.q ?? "") || undefined; - break; - default: - return undefined; - } - - if (!raw) return undefined; - - // For paths, show only the filename - if ( - raw.includes("/") && - toolName !== "mastra_workspace_search" && - toolName !== "web_fetch" && - toolName !== "web_search" - ) { - return raw.split("/").pop() ?? raw; - } - - return raw; -} - -/** - * The Mastra workspace read_file tool returns content in this format: - * /path/to/file (N bytes) - * 1\tline one - * 2\tline two - * - * Strip the header and line-number prefixes to get clean file content. 
- */ -function parseReadFileResult(result: string): { - filename: string; - content: string; - lineCount: number; -} | null { - const lines = result.split("\n"); - if (lines.length < 2) return null; - - // First line: "path/to/file (N bytes)" or just content - const headerMatch = lines[0].match(/^(.+?)\s*\(\d+\s*bytes?\)\s*$/i); - if (!headerMatch) return null; - - const filename = headerMatch[1].trim(); - const contentLines = lines.slice(1); - - // Strip line-number prefix: " N\t" or " N→" - const stripped = contentLines.map((line) => { - const tabMatch = line.match(/^\s*\d+\t(.*)$/); - if (tabMatch) return tabMatch[1]; - const arrowMatch = line.match(/^\s*\d+\u2192(.*)$/); - if (arrowMatch) return arrowMatch[1]; - return line; - }); - - // Trim trailing blank lines - while (stripped.length > 0 && stripped[stripped.length - 1].trim() === "") { - stripped.pop(); - } - - return { - filename, - content: stripped.join("\n"), - lineCount: stripped.length, - }; -} - -export function SubagentInnerToolCall({ - name, - isError, - isPending = false, - args, - result, - workspaceCwd, - onOpenFileInPane, -}: SubagentInnerToolCallProps) { - const normalized = normalizeToolName(name); - const state = isPending - ? ("input-available" as const) - : isError - ? ("output-error" as const) - : ("output-available" as const); - - const { label, icon } = getToolMeta(normalized); - const description = getDescription(normalized, args); - const hasResult = result !== null && result.trim().length > 0; - - // Read file: parse and display using the shared ReadFileTool component - const parsedReadFile = useMemo(() => { - if (normalized !== "mastra_workspace_read_file") return null; - if (result === null || result.trim().length === 0) return null; - return parseReadFileResult(result); - }, [normalized, result]); - - if (normalized === "mastra_workspace_execute_command") { - const argsRecord = args ?? {}; - const resultRecord = result !== null ? 
{ content: result } : {}; - const { command, stdout, stderr, exitCode } = getExecuteCommandViewModel({ - args: argsRecord, - result: resultRecord, - }); - return ( - - ); - } - if ( - normalized === "mastra_workspace_read_file" && - hasResult && - parsedReadFile - ) { - const parsed = parsedReadFile; - if (parsed) { - const filename = parsed.filename.split("/").pop() ?? parsed.filename; - const lineRange = `1–${parsed.lineCount}`; - const openInPane = onOpenFileInPane - ? () => { - const rawPath = String( - args?.path ?? - args?.filePath ?? - args?.file_path ?? - args?.file ?? - parsed.filename, - ); - const resolvedPath = - normalizeWorkspaceFilePath({ - filePath: rawPath, - workspaceRoot: workspaceCwd, - }) ?? rawPath; - onOpenFileInPane(resolvedPath); - } - : undefined; - return ( - - ); - } - } - - // For file-path tools, make the filename in the description clickable. - const rawFilePath = getRawFilePath(normalized, args ?? {}); - const resolvedFilePath = rawFilePath - ? (normalizeWorkspaceFilePath({ - filePath: rawFilePath, - workspaceRoot: workspaceCwd, - }) ?? rawFilePath) - : null; - - const descriptionNode = - resolvedFilePath && onOpenFileInPane && description ? ( - onOpenFileInPane(resolvedFilePath)} - /> - ) : ( - description - ); - - return ( - - {hasResult ? ( -
-
- {result} -
-
- ) : undefined} -
- ); -} diff --git a/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/index.ts b/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/index.ts deleted file mode 100644 index e2ec02722a2..00000000000 --- a/apps/desktop/src/renderer/components/Chat/components/SubagentInnerToolCall/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { SubagentInnerToolCall } from "./SubagentInnerToolCall"; diff --git a/apps/desktop/src/renderer/components/MarkdownEditor/MarkdownEditor.tsx b/apps/desktop/src/renderer/components/MarkdownEditor/MarkdownEditor.tsx index 36dc056dfca..c4f6dd8d639 100644 --- a/apps/desktop/src/renderer/components/MarkdownEditor/MarkdownEditor.tsx +++ b/apps/desktop/src/renderer/components/MarkdownEditor/MarkdownEditor.tsx @@ -1,6 +1,7 @@ import "highlight.js/styles/github-dark.css"; import "./markdown-editor.css"; +import { isEnterSubmit } from "@superset/ui/lib/keyboard"; import { cn } from "@superset/ui/utils"; import { Extension } from "@tiptap/core"; import { Blockquote } from "@tiptap/extension-blockquote"; @@ -272,11 +273,9 @@ export function MarkdownEditor({ class: cn("focus:outline-none min-h-[100px]", editorClassName), }, handleKeyDown: (_, event) => { - if ((event.metaKey || event.ctrlKey) && event.key === "Enter") { - onModEnter?.(); - return true; - } - return false; + if (!isEnterSubmit(event, { requireMod: true })) return false; + onModEnter?.(); + return true; }, handleClickOn: (_view, _pos, _node, _nodePos, event) => { const target = event.target as HTMLElement | null; diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/MarkdownRenderer.tsx b/apps/desktop/src/renderer/components/MarkdownRenderer/MarkdownRenderer.tsx index 7ca1d194c71..4584d2f4cec 100644 --- a/apps/desktop/src/renderer/components/MarkdownRenderer/MarkdownRenderer.tsx +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/MarkdownRenderer.tsx @@ -6,6 +6,7 @@ import rehypeSanitize from "rehype-sanitize"; import 
remarkGfm from "remark-gfm"; import { useMarkdownStyle } from "renderer/stores"; import { SelectionContextMenu } from "./components"; +import { TrustedImageProvider } from "./components/SafeImage"; import { defaultConfig } from "./styles/default/config"; import { tufteConfig } from "./styles/tufte/config"; @@ -18,12 +19,18 @@ interface MarkdownRendererProps { content: string; style?: keyof typeof styleConfigs; className?: string; + scrollable?: boolean; + workspaceId?: string; + trustedImageRootPath?: string | null; } export function MarkdownRenderer({ content, style: styleProp, className, + scrollable = true, + workspaceId, + trustedImageRootPath, }: MarkdownRendererProps) { const globalStyle = useMarkdownStyle(); const style = styleProp ?? globalStyle; @@ -32,23 +39,29 @@ export function MarkdownRenderer({ return ( -
-
- - {content} - -
-
+
+
+ + {content} + +
+
+
); } diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/CodeBlock/CodeBlock.tsx b/apps/desktop/src/renderer/components/MarkdownRenderer/components/CodeBlock/CodeBlock.tsx index 9fd23c6e5fe..91cfe28d606 100644 --- a/apps/desktop/src/renderer/components/MarkdownRenderer/components/CodeBlock/CodeBlock.tsx +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/CodeBlock/CodeBlock.tsx @@ -1,6 +1,10 @@ import { mermaid } from "@streamdown/mermaid"; -import { ShowCode } from "@superset/ui/ai-elements/show-code"; import type { ReactNode } from "react"; +import { Prism as SyntaxHighlighter } from "react-syntax-highlighter"; +import { + oneDark, + oneLight, +} from "react-syntax-highlighter/dist/esm/styles/prism"; import { useTheme } from "renderer/stores"; import { Streamdown } from "streamdown"; @@ -22,6 +26,7 @@ interface CodeBlockProps { export function CodeBlock({ children, className, node }: CodeBlockProps) { const theme = useTheme(); const isDark = theme?.type !== "light"; + const syntaxStyle = isDark ? oneDark : oneLight; const match = /language-(\w+)/.exec(className || ""); const language = match ? match[1] : undefined; @@ -51,12 +56,13 @@ export function CodeBlock({ children, className, node }: CodeBlockProps) { } return ( - + } + language={language ?? "text"} + PreTag="div" + className="rounded-md text-sm" + > + {codeString} + ); } diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/SafeImage.tsx b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/SafeImage.tsx index 91295eaa98f..22752f1cdce 100644 --- a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/SafeImage.tsx +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/SafeImage.tsx @@ -1,4 +1,5 @@ import { LuImageOff } from "react-icons/lu"; +import { useResolvedImageSrc } from "./useResolvedImageSrc"; /** * Check if an image source is safe to load. 
@@ -48,7 +49,22 @@ interface SafeImageProps { * as blob: URLs. */ export function SafeImage({ src, alt, className }: SafeImageProps) { - if (!isSafeImageSrc(src)) { + const resolvedImage = useResolvedImageSrc(src); + + if (resolvedImage.isLoading) { + return ( +
+ Loading image... +
+ ); + } + + if ( + (src && !isSafeImageSrc(src) && !resolvedImage.src) || + resolvedImage.isBlocked + ) { return (
diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/TrustedImageContext.tsx b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/TrustedImageContext.tsx new file mode 100644 index 00000000000..8b5a36bc05d --- /dev/null +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/TrustedImageContext.tsx @@ -0,0 +1,24 @@ +import { createContext, type PropsWithChildren, useContext } from "react"; + +interface TrustedImageContextValue { + workspaceId?: string; + trustedImageRootPath?: string | null; +} + +const TrustedImageContext = createContext({}); + +export function TrustedImageProvider({ + children, + trustedImageRootPath, + workspaceId, +}: PropsWithChildren) { + return ( + + {children} + + ); +} + +export function useTrustedImageContext(): TrustedImageContextValue { + return useContext(TrustedImageContext); +} diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/index.ts b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/index.ts index 3a608bf50fd..71dcad82141 100644 --- a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/index.ts +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/index.ts @@ -1 +1,2 @@ export { SafeImage } from "./SafeImage"; +export { TrustedImageProvider } from "./TrustedImageContext"; diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/useResolvedImageSrc.ts b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/useResolvedImageSrc.ts new file mode 100644 index 00000000000..8ad92318ae3 --- /dev/null +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/SafeImage/useResolvedImageSrc.ts @@ -0,0 +1,88 @@ +import { useMemo } from "react"; +import { electronTrpc } from "renderer/lib/electron-trpc"; +import { resolveTrustedMemoImagePath } from "renderer/lib/workspace-memos"; 
+import { getImageMimeType } from "shared/file-types"; +import { useTrustedImageContext } from "./TrustedImageContext"; + +const MAX_IMAGE_SIZE = 10 * 1024 * 1024; + +interface ResolvedImageState { + isBlocked: boolean; + isLoading: boolean; + src?: string; +} + +export function useResolvedImageSrc( + source: string | undefined, +): ResolvedImageState { + const { trustedImageRootPath, workspaceId } = useTrustedImageContext(); + + const trimmedSource = source?.trim(); + const dataUrl = trimmedSource?.toLowerCase().startsWith("data:") + ? trimmedSource + : undefined; + + const trustedAbsolutePath = useMemo(() => { + if (!trimmedSource || dataUrl || !trustedImageRootPath) { + return null; + } + + return resolveTrustedMemoImagePath(trustedImageRootPath, trimmedSource); + }, [dataUrl, trimmedSource, trustedImageRootPath]); + + const mimeType = useMemo(() => { + if (!trustedAbsolutePath) { + return null; + } + return getImageMimeType(trustedAbsolutePath); + }, [trustedAbsolutePath]); + + const imageQuery = electronTrpc.filesystem.readFile.useQuery( + { + workspaceId: workspaceId ?? "", + absolutePath: trustedAbsolutePath ?? 
"", + maxBytes: MAX_IMAGE_SIZE, + }, + { + enabled: Boolean(workspaceId && trustedAbsolutePath && mimeType), + retry: false, + refetchOnWindowFocus: false, + staleTime: Infinity, + }, + ); + + if (dataUrl) { + return { + isBlocked: false, + isLoading: false, + src: dataUrl, + }; + } + + if (!trimmedSource || !trustedAbsolutePath || !mimeType) { + return { + isBlocked: true, + isLoading: false, + }; + } + + if (imageQuery.isLoading) { + return { + isBlocked: false, + isLoading: true, + }; + } + + if (imageQuery.error || !imageQuery.data || imageQuery.data.exceededLimit) { + return { + isBlocked: true, + isLoading: false, + }; + } + + return { + isBlocked: false, + isLoading: false, + src: `data:${mimeType};base64,${imageQuery.data.content}`, + }; +} diff --git a/apps/desktop/src/renderer/components/MarkdownRenderer/components/TipTapMarkdownRenderer/TipTapMarkdownRenderer.tsx b/apps/desktop/src/renderer/components/MarkdownRenderer/components/TipTapMarkdownRenderer/TipTapMarkdownRenderer.tsx index 9dbc3d2fd7a..ae60d82e335 100644 --- a/apps/desktop/src/renderer/components/MarkdownRenderer/components/TipTapMarkdownRenderer/TipTapMarkdownRenderer.tsx +++ b/apps/desktop/src/renderer/components/MarkdownRenderer/components/TipTapMarkdownRenderer/TipTapMarkdownRenderer.tsx @@ -1,12 +1,18 @@ import "highlight.js/styles/github-dark.css"; +import { toast } from "@superset/ui/sonner"; import { cn } from "@superset/ui/utils"; import { type Editor, EditorContent, useEditor } from "@tiptap/react"; import { BubbleMenu } from "@tiptap/react/menus"; import { type MutableRefObject, useEffect, useRef } from "react"; +import { + getWorkspaceMemoContextFromFilePath, + saveMemoImageFile, +} from "renderer/lib/workspace-memos"; import { useMarkdownStyle } from "renderer/stores"; import { defaultConfig } from "../../styles/default/config"; import { tufteConfig } from "../../styles/tufte/config"; +import { TrustedImageProvider } from "../SafeImage"; import { SelectionContextMenu } from 
"../SelectionContextMenu"; import { BubbleMenuToolbar } from "./components/BubbleMenuToolbar"; import { createMarkdownExtensions } from "./createMarkdownExtensions"; @@ -31,6 +37,9 @@ interface TipTapMarkdownRendererProps { editorRef?: MutableRefObject; onChange?: (value: string) => void; onSave?: () => void; + workspaceId?: string; + filePath?: string; + trustedImageRootPath?: string | null; } function getEditorMarkdown(editor: Editor): string { @@ -70,6 +79,9 @@ export function TipTapMarkdownRenderer({ editorRef, onChange, onSave, + workspaceId, + filePath, + trustedImageRootPath, }: TipTapMarkdownRendererProps) { const globalStyle = useMarkdownStyle(); const style = styleProp ?? globalStyle; @@ -77,9 +89,14 @@ export function TipTapMarkdownRenderer({ const articleRef = useRef(null); const onChangeRef = useRef(onChange); const onSaveRef = useRef(onSave); + const workspaceIdRef = useRef(workspaceId); + const filePathRef = useRef(filePath); + const lastAppliedValueRef = useRef(value); onChangeRef.current = onChange; onSaveRef.current = onSave; + workspaceIdRef.current = workspaceId; + filePathRef.current = filePath; const editor = useEditor({ immediatelyRender: false, @@ -91,7 +108,65 @@ export function TipTapMarkdownRenderer({ content: value, editorProps: { attributes: { - class: cn("focus:outline-none", editable && "min-h-[100px]"), + class: cn( + "focus:outline-none", + editable && "min-h-[100px] min-h-full cursor-text", + ), + }, + handlePaste: (view, event) => { + if (!editable) { + return false; + } + + const activeWorkspaceId = workspaceIdRef.current; + const activeFilePath = filePathRef.current; + if ( + !activeWorkspaceId || + !activeFilePath || + !getWorkspaceMemoContextFromFilePath(activeFilePath) + ) { + return false; + } + + const imageFile = Array.from(event.clipboardData?.items ?? 
[]) + .find((item) => item.type.startsWith("image/")) + ?.getAsFile(); + if (!imageFile) { + return false; + } + + event.preventDefault(); + void saveMemoImageFile({ + workspaceId: activeWorkspaceId, + memoFilePath: activeFilePath, + file: imageFile, + }) + .then(({ relativePath }) => { + const imageNodeType = view.state.schema.nodes.image; + if (imageNodeType) { + const transaction = view.state.tr.replaceSelectionWith( + imageNodeType.create({ + src: relativePath, + alt: imageFile.name || "pasted image", + }), + ); + view.dispatch(transaction.scrollIntoView()); + return; + } + + view.dispatch( + view.state.tr + .insertText( + `![${imageFile.name || "pasted image"}](${relativePath})`, + ) + .scrollIntoView(), + ); + }) + .catch((error: Error) => { + toast.error(`Failed to paste image: ${error.message}`); + }); + + return true; }, }, onUpdate: ({ editor: currentEditor }) => { @@ -104,6 +179,12 @@ export function TipTapMarkdownRenderer({ return; } + if (lastAppliedValueRef.current === value) { + return; + } + + lastAppliedValueRef.current = value; + const currentValue = getEditorMarkdown(editor); if (currentValue === value) { return; @@ -137,33 +218,60 @@ export function TipTapMarkdownRenderer({ }, [editor, editorRef]); const content = ( -
- {editable && editor && ( - { - if (from === to) return false; - if (e.isActive("codeBlock")) return false; - return true; - }} +
+ {editable && editor && ( + { + if (from === to) return false; + if (e.isActive("codeBlock")) return false; + return true; + }} + > + + + )} +
- - - )} -
- -
-
+ {editable && editor && value.length === 0 ? ( + + ) : null} + + +
+ ); if (editable) { diff --git a/apps/desktop/src/renderer/components/RouteErrorBoundary/RouteErrorBoundary.tsx b/apps/desktop/src/renderer/components/RouteErrorBoundary/RouteErrorBoundary.tsx new file mode 100644 index 00000000000..a893f0d39ff --- /dev/null +++ b/apps/desktop/src/renderer/components/RouteErrorBoundary/RouteErrorBoundary.tsx @@ -0,0 +1,83 @@ +import type { ErrorInfo, ReactNode } from "react"; +import { Component } from "react"; +import { reportError } from "renderer/lib/report-error"; + +export interface RouteErrorBoundaryProps { + children: ReactNode; + /** Short identifier to tag the Sentry event (e.g. "settings", "workspace"). */ + routeName: string; + /** Optional override for the fallback UI. */ + fallback?: (error: Error, reset: () => void) => ReactNode; +} + +interface RouteErrorBoundaryState { + hasError: boolean; + error: Error | null; +} + +/** + * Subtree error boundary. Use for routes or large sections where a local + * failure shouldn't take down the whole app. + * + * Forwards errors to Sentry with `route` and `component-stack` context so + * crashes are attributable to the failing subtree. 
+ */ +export class RouteErrorBoundary extends Component< + RouteErrorBoundaryProps, + RouteErrorBoundaryState +> { + state: RouteErrorBoundaryState = { hasError: false, error: null }; + + static getDerivedStateFromError(error: Error): RouteErrorBoundaryState { + return { hasError: true, error }; + } + + componentDidCatch(error: Error, errorInfo: ErrorInfo): void { + console.error( + `[route-error-boundary:${this.props.routeName}] caught:`, + error, + errorInfo, + ); + reportError(error, { + severity: "error", + tags: { subsystem: "route-error-boundary", route: this.props.routeName }, + context: { componentStack: errorInfo.componentStack }, + }); + } + + private reset = () => { + this.setState({ hasError: false, error: null }); + }; + + render(): ReactNode { + if (!this.state.hasError || !this.state.error) { + return this.props.children; + } + + if (this.props.fallback) { + return this.props.fallback(this.state.error, this.reset); + } + + return ( +
+
+

Something went wrong

+

+ This section crashed and was isolated to protect the rest of the + app. The error has been reported. +

+
+						{this.state.error.message}
+					
+ +
+
+ ); + } +} diff --git a/apps/desktop/src/renderer/components/RouteErrorBoundary/index.ts b/apps/desktop/src/renderer/components/RouteErrorBoundary/index.ts new file mode 100644 index 00000000000..0c5116f607d --- /dev/null +++ b/apps/desktop/src/renderer/components/RouteErrorBoundary/index.ts @@ -0,0 +1 @@ +export { RouteErrorBoundary } from "./RouteErrorBoundary"; diff --git a/apps/desktop/src/renderer/components/UpdateRequiredPage/UpdateRequiredPage.tsx b/apps/desktop/src/renderer/components/UpdateRequiredPage/UpdateRequiredPage.tsx index c6c4a763306..af5ab804bcb 100644 --- a/apps/desktop/src/renderer/components/UpdateRequiredPage/UpdateRequiredPage.tsx +++ b/apps/desktop/src/renderer/components/UpdateRequiredPage/UpdateRequiredPage.tsx @@ -90,11 +90,7 @@ export function UpdateRequiredPage({ -
)} diff --git a/apps/desktop/src/renderer/env.renderer.ts b/apps/desktop/src/renderer/env.renderer.ts index 445b0be9d46..6d11e526cc4 100644 --- a/apps/desktop/src/renderer/env.renderer.ts +++ b/apps/desktop/src/renderer/env.renderer.ts @@ -18,6 +18,7 @@ const envSchema = z.object({ NEXT_PUBLIC_API_URL: z.url().default("https://api.superset.sh"), NEXT_PUBLIC_WEB_URL: z.url().default("https://app.superset.sh"), NEXT_PUBLIC_MARKETING_URL: z.url().default("https://superset.sh"), + NEXT_PUBLIC_OPEN_LINK_URL: z.url().default("https://superset.m4gu.dev"), NEXT_PUBLIC_ELECTRIC_URL: z .url() .default("https://electric-proxy.avi-6ac.workers.dev"), @@ -39,6 +40,7 @@ const rawEnv = { NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL, NEXT_PUBLIC_WEB_URL: process.env.NEXT_PUBLIC_WEB_URL, NEXT_PUBLIC_MARKETING_URL: process.env.NEXT_PUBLIC_MARKETING_URL, + NEXT_PUBLIC_OPEN_LINK_URL: process.env.NEXT_PUBLIC_OPEN_LINK_URL, NEXT_PUBLIC_ELECTRIC_URL: process.env.NEXT_PUBLIC_ELECTRIC_URL, NEXT_PUBLIC_POSTHOG_KEY: import.meta.env.NEXT_PUBLIC_POSTHOG_KEY as | string diff --git a/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/AgentRuntimePicker.tsx b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/AgentRuntimePicker.tsx new file mode 100644 index 00000000000..8af3ccfa16f --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/AgentRuntimePicker.tsx @@ -0,0 +1,291 @@ +import { Label } from "@superset/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@superset/ui/select"; +import { cn } from "@superset/ui/utils"; +import type { AgentKind } from "main/todo-agent/types"; +import { + CLAUDE_EFFORT_SELECT_OPTIONS, + CLAUDE_MODEL_SELECT_OPTIONS, + type ClaudeEffortPick, + type ClaudeModelPick, + CODEX_EFFORT_SELECT_OPTIONS, + CODEX_MODEL_SELECT_OPTIONS, + type CodexEffortPick, + type CodexModelPick, + type CrushModelPick, + DEFAULT_SENTINEL, +} from 
"./claudeRuntimeOptions"; + +interface AgentRuntimePickerProps { + agentKind: AgentKind; + onAgentKindChange: (value: AgentKind) => void; + claudeModel: ClaudeModelPick; + claudeEffort: ClaudeEffortPick; + onClaudeModelChange: (value: ClaudeModelPick) => void; + onClaudeEffortChange: (value: ClaudeEffortPick) => void; + codexModel: CodexModelPick; + codexEffort: CodexEffortPick; + onCodexModelChange: (value: CodexModelPick) => void; + onCodexEffortChange: (value: CodexEffortPick) => void; + crushModel: CrushModelPick; + onCrushModelChange: (value: CrushModelPick) => void; + crushModels: string[]; + disabled?: boolean; + layout?: "stacked" | "row"; + compact?: boolean; +} + +const AGENT_KIND_OPTIONS: Array<{ + value: AgentKind; + label: string; + description: string; +}> = [ + { + value: "claude", + label: "Claude Code", + description: "Anthropic Claude Code CLI", + }, + { + value: "codex", + label: "Codex CLI", + description: "OpenAI Codex CLI (codex exec)", + }, + { + value: "crush", + label: "Crush", + description: "Charmbracelet Crush CLI (crush run)", + }, +]; + +export function AgentRuntimePicker({ + agentKind, + onAgentKindChange, + claudeModel, + claudeEffort, + onClaudeModelChange, + onClaudeEffortChange, + codexModel, + codexEffort, + onCodexModelChange, + onCodexEffortChange, + crushModel, + onCrushModelChange, + crushModels, + disabled, + layout = "row", + compact = true, +}: AgentRuntimePickerProps) { + const labelClass = compact ? "text-xs" : "text-sm"; + const triggerClass = compact ? "h-8 text-xs" : ""; + + return ( +
+
+ + +
+ + {agentKind === "crush" ? ( + + ) : ( + + )} +
+ ); +} + +function ClaudeCodexModelSection({ + agentKind, + claudeModel, + claudeEffort, + onClaudeModelChange, + onClaudeEffortChange, + codexModel, + codexEffort, + onCodexModelChange, + onCodexEffortChange, + disabled, + layout, + labelClass, + triggerClass, +}: { + agentKind: "claude" | "codex"; + claudeModel: ClaudeModelPick; + claudeEffort: ClaudeEffortPick; + onClaudeModelChange: (v: ClaudeModelPick) => void; + onClaudeEffortChange: (v: ClaudeEffortPick) => void; + codexModel: CodexModelPick; + codexEffort: CodexEffortPick; + onCodexModelChange: (v: CodexModelPick) => void; + onCodexEffortChange: (v: CodexEffortPick) => void; + disabled?: boolean; + layout?: "stacked" | "row"; + labelClass: string; + triggerClass: string; +}) { + const isClaude = agentKind === "claude"; + const modelOptions = isClaude + ? CLAUDE_MODEL_SELECT_OPTIONS + : CODEX_MODEL_SELECT_OPTIONS; + const effortOptions = isClaude + ? CLAUDE_EFFORT_SELECT_OPTIONS + : CODEX_EFFORT_SELECT_OPTIONS; + const currentModel = isClaude ? claudeModel : codexModel; + const currentEffort = isClaude ? claudeEffort : codexEffort; + const onModelChange = isClaude ? onClaudeModelChange : onCodexModelChange; + const onEffortChange = isClaude ? onClaudeEffortChange : onCodexEffortChange; + + return ( +
+
+ + +
+
+ + +
+
+ ); +} + +function CrushModelSection({ + crushModel, + onCrushModelChange, + crushModels, + disabled, + labelClass, + triggerClass, +}: { + crushModel: CrushModelPick; + onCrushModelChange: (v: CrushModelPick) => void; + crushModels: string[]; + disabled?: boolean; + labelClass: string; + triggerClass: string; +}) { + const grouped = groupByProvider(crushModels); + + return ( +
+ + +
+ ); +} + +function groupByProvider(models: string[]): Record { + const result: Record = {}; + for (const model of models) { + const sep = model.indexOf("/"); + const provider = sep > 0 ? model.slice(0, sep) : "other"; + if (!result[provider]) result[provider] = []; + result[provider].push(model); + } + return result; +} diff --git a/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/ClaudeRuntimePicker.tsx b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/ClaudeRuntimePicker.tsx new file mode 100644 index 00000000000..531e82bdab1 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/ClaudeRuntimePicker.tsx @@ -0,0 +1,95 @@ +import { Label } from "@superset/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@superset/ui/select"; +import { cn } from "@superset/ui/utils"; +import { + CLAUDE_EFFORT_SELECT_OPTIONS, + CLAUDE_MODEL_SELECT_OPTIONS, + type ClaudeEffortPick, + type ClaudeModelPick, +} from "./claudeRuntimeOptions"; + +interface ClaudeRuntimePickerProps { + model: ClaudeModelPick; + effort: ClaudeEffortPick; + onModelChange: (value: ClaudeModelPick) => void; + onEffortChange: (value: ClaudeEffortPick) => void; + disabled?: boolean; + layout?: "stacked" | "row"; + compact?: boolean; +} + +/** + * Model + effort picker used by the TODO composer, the Schedule editor, + * and the global defaults tab of the preset dialog. The Select surface + * is shared so picking a new model / effort for a single TODO or for the + * global default uses the exact same controls (including localized + * labels and the "Claude Code の既定値" sentinel). + */ +export function ClaudeRuntimePicker({ + model, + effort, + onModelChange, + onEffortChange, + disabled, + layout = "row", + compact = true, +}: ClaudeRuntimePickerProps) { + const labelClass = compact ? "text-xs" : "text-sm"; + const triggerClass = compact ? "h-8 text-xs" : ""; + + return ( +
+
+
+ + +
+
+ + +
+
+
+ ); +} diff --git a/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/claudeRuntimeOptions.ts b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/claudeRuntimeOptions.ts new file mode 100644 index 00000000000..9c924ac2377 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/claudeRuntimeOptions.ts @@ -0,0 +1,394 @@ +import { + CLAUDE_EFFORT_OPTIONS, + CLAUDE_MODEL_OPTIONS, + CODEX_EFFORT_OPTIONS, + CODEX_MODEL_OPTIONS, + type TodoClaudeEffort, + type TodoClaudeModel, + type TodoCodexEffort, + type TodoCodexModel, +} from "main/todo-agent/types"; + +/** + * Sentinel string used in the model/effort Select to represent "no + * explicit choice — let Claude Code's own default cascade win". The + * Select value space can't hold `null`, so we round-trip through this + * sentinel and convert to/from `null` at the persistence boundary. + * + * `__default__` was chosen over the empty string because Radix's Select + * treats empty strings as "value missing", which disables the visual + * selection state and forces the placeholder to render instead of the + * "デフォルト" label we want to show. 
+ */ +export const DEFAULT_SENTINEL = "__default__" as const; + +export type ClaudeModelPick = typeof DEFAULT_SENTINEL | TodoClaudeModel; +export type ClaudeEffortPick = typeof DEFAULT_SENTINEL | TodoClaudeEffort; + +interface Option { + value: V; + label: string; + description: string; +} + +export const CLAUDE_MODEL_SELECT_OPTIONS: ReadonlyArray< + Option +> = [ + { + value: DEFAULT_SENTINEL, + label: "デフォルト", + description: "Claude Code の設定をそのまま使う(--model を渡さない)", + }, + { + value: "opus", + label: "Opus(最新)", + description: "opus 系の最新モデルを alias で指定(Claude 4.x)", + }, + { + value: "sonnet", + label: "Sonnet(最新)", + description: "sonnet 系の最新モデル", + }, + { + value: "haiku", + label: "Haiku(最新)", + description: "haiku 系の最新モデル(軽量・高速)", + }, + { + value: "claude-opus-4-7", + label: "Opus 4.7(固定)", + description: "Opus を 4.7 に固定したいとき", + }, + { + value: "claude-sonnet-4-6", + label: "Sonnet 4.6(固定)", + description: "Sonnet を 4.6 に固定したいとき", + }, + { + value: "claude-haiku-4-5-20251001", + label: "Haiku 4.5(固定)", + description: "Haiku を 4.5 に固定したいとき", + }, +] as const; + +export const CLAUDE_EFFORT_SELECT_OPTIONS: ReadonlyArray< + Option +> = [ + { + value: DEFAULT_SENTINEL, + label: "デフォルト", + description: "Claude Code の既定値を尊重する", + }, + { + value: "low", + label: "low(軽量)", + description: "思考量を抑える。単純タスク向け", + }, + { + value: "medium", + label: "medium", + description: "中程度の思考量。バランス型", + }, + { + value: "high", + label: "high", + description: "深く考えさせたいとき", + }, + { + value: "xhigh", + label: "xhigh", + description: "最上位クラスの思考量", + }, + { + value: "max", + label: "max(最大)", + description: "上限まで思考。コストが高くなるので注意", + }, +] as const; + +/** + * Hoist both constants so importers don't have to pull them from the + * main-process types alongside UI-only helpers. 
+ */ +export { + CLAUDE_EFFORT_OPTIONS, + CLAUDE_MODEL_OPTIONS, + CODEX_EFFORT_OPTIONS, + CODEX_MODEL_OPTIONS, + type TodoClaudeEffort, + type TodoClaudeModel, + type TodoCodexEffort, + type TodoCodexModel, +}; + +export function toPersistedModel( + pick: ClaudeModelPick, +): TodoClaudeModel | null { + return pick === DEFAULT_SENTINEL ? null : pick; +} + +export function toPersistedEffort( + pick: ClaudeEffortPick, +): TodoClaudeEffort | null { + return pick === DEFAULT_SENTINEL ? null : pick; +} + +/** + * Narrow a DB-side `string | null` back into the picker's discriminated + * value space. Unknown strings (persisted from an older build with a + * wider allowed set) fall back to the sentinel so the Select stays on + * "デフォルト" instead of rendering as empty. We log a warning so a + * silent data regression is at least visible in DevTools — users who + * had a now-retired model selected will notice the reset when they + * next save the TODO / schedule. + */ +export function fromPersistedModel( + persisted: string | null | undefined, +): ClaudeModelPick { + if (persisted == null) return DEFAULT_SENTINEL; + if ((CLAUDE_MODEL_OPTIONS as readonly string[]).includes(persisted)) { + return persisted as TodoClaudeModel; + } + console.warn( + "[ClaudeRuntimePicker] unknown persisted model, falling back to default:", + persisted, + ); + return DEFAULT_SENTINEL; +} + +export function fromPersistedEffort( + persisted: string | null | undefined, +): ClaudeEffortPick { + if (persisted == null) return DEFAULT_SENTINEL; + if ((CLAUDE_EFFORT_OPTIONS as readonly string[]).includes(persisted)) { + return persisted as TodoClaudeEffort; + } + console.warn( + "[ClaudeRuntimePicker] unknown persisted effort, falling back to default:", + persisted, + ); + return DEFAULT_SENTINEL; +} + +/** + * Resolve a DB-persisted model/effort value to the human-readable label + * the picker shows. 
Used by read-only views (session detail, schedule + * list) so the label matches what the user originally selected. + * + * null/undefined → "デフォルト" (matches the sentinel's label). + * Unknown values (persisted from an older build with a wider allowed set) + * surface the raw string so detail views don't silently lie about what is + * actually configured — we fall back to `fromPersisted*` only for the + * `DEFAULT_SENTINEL` case. + */ +export function getClaudeModelLabel( + persisted: string | null | undefined, +): string { + if (persisted == null) { + return ( + CLAUDE_MODEL_SELECT_OPTIONS.find((o) => o.value === DEFAULT_SENTINEL) + ?.label ?? "デフォルト" + ); + } + return ( + CLAUDE_MODEL_SELECT_OPTIONS.find((o) => o.value === persisted)?.label ?? + persisted + ); +} + +export function getClaudeEffortLabel( + persisted: string | null | undefined, +): string { + if (persisted == null) { + return ( + CLAUDE_EFFORT_SELECT_OPTIONS.find((o) => o.value === DEFAULT_SENTINEL) + ?.label ?? "デフォルト" + ); + } + return ( + CLAUDE_EFFORT_SELECT_OPTIONS.find((o) => o.value === persisted)?.label ?? 
+ persisted + ); +} + +// ---- Codex CLI options ---- + +export type CodexModelPick = typeof DEFAULT_SENTINEL | TodoCodexModel; +export type CodexEffortPick = typeof DEFAULT_SENTINEL | TodoCodexEffort; + +export const CODEX_MODEL_SELECT_OPTIONS: ReadonlyArray> = + [ + { + value: DEFAULT_SENTINEL, + label: "デフォルト", + description: "Codex CLI の設定をそのまま使う(--model を渡さない)", + }, + { + value: "gpt-5.4", + label: "GPT-5.4 (current)", + description: "Latest frontier agentic coding model", + }, + { + value: "gpt-5.2-codex", + label: "GPT-5.2 Codex", + description: "Frontier agentic coding model", + }, + { + value: "gpt-5.1-codex-max", + label: "GPT-5.1 Codex Max", + description: "Codex-optimized flagship for deep and fast reasoning", + }, + { + value: "gpt-5.4-mini", + label: "GPT-5.4 Mini", + description: "Smaller frontier agentic coding model", + }, + { + value: "gpt-5.3-codex", + label: "GPT-5.3 Codex", + description: "Frontier Codex-optimized agentic coding model", + }, + { + value: "gpt-5.3-codex-spark", + label: "GPT-5.3 Codex Spark", + description: "Ultra-fast coding model", + }, + { + value: "gpt-5.2", + label: "GPT-5.2", + description: "Optimized for professional work and long-running agents", + }, + { + value: "gpt-5.1-codex-mini", + label: "GPT-5.1 Codex Mini", + description: "Optimized for codex. 
Cheaper, faster, but less capable", + }, + ] as const; + +export const CODEX_EFFORT_SELECT_OPTIONS: ReadonlyArray< + Option +> = [ + { + value: DEFAULT_SENTINEL, + label: "デフォルト", + description: "Codex CLI の既定値を尊重する", + }, + { + value: "none", + label: "none(推論なし)", + description: "推論を行わない", + }, + { + value: "minimal", + label: "minimal", + description: "最小限の推論", + }, + { + value: "low", + label: "low(軽量)", + description: "低めの推論量", + }, + { + value: "medium", + label: "medium", + description: "中程度の推論量(既定)", + }, + { + value: "high", + label: "high", + description: "高めの推論量", + }, + { + value: "xhigh", + label: "xhigh(最高)", + description: "最大推論量", + }, +] as const; + +export function toPersistedCodexModel( + pick: CodexModelPick, +): TodoCodexModel | null { + return pick === DEFAULT_SENTINEL ? null : pick; +} + +export function toPersistedCodexEffort( + pick: CodexEffortPick, +): TodoCodexEffort | null { + return pick === DEFAULT_SENTINEL ? null : pick; +} + +export function fromPersistedCodexModel( + persisted: string | null | undefined, +): CodexModelPick { + if (persisted == null) return DEFAULT_SENTINEL; + if ((CODEX_MODEL_OPTIONS as readonly string[]).includes(persisted)) { + return persisted as TodoCodexModel; + } + return DEFAULT_SENTINEL; +} + +export function fromPersistedCodexEffort( + persisted: string | null | undefined, +): CodexEffortPick { + if (persisted == null) return DEFAULT_SENTINEL; + if ((CODEX_EFFORT_OPTIONS as readonly string[]).includes(persisted)) { + return persisted as TodoCodexEffort; + } + return DEFAULT_SENTINEL; +} + +export function getCodexModelLabel( + persisted: string | null | undefined, +): string { + if (persisted == null) { + return ( + CODEX_MODEL_SELECT_OPTIONS.find((o) => o.value === DEFAULT_SENTINEL) + ?.label ?? "デフォルト" + ); + } + return ( + CODEX_MODEL_SELECT_OPTIONS.find((o) => o.value === persisted)?.label ?? 
+ persisted + ); +} + +export function getCodexEffortLabel( + persisted: string | null | undefined, +): string { + if (persisted == null) { + return ( + CODEX_EFFORT_SELECT_OPTIONS.find((o) => o.value === DEFAULT_SENTINEL) + ?.label ?? "デフォルト" + ); + } + return ( + CODEX_EFFORT_SELECT_OPTIONS.find((o) => o.value === persisted)?.label ?? + persisted + ); +} + +// ---- Crush CLI options ---- +// +// Crush has 200+ models and no effort concept. The model list is dynamic +// (fetched from `crush models` via tRPC) so we use free-form strings +// instead of a const array. Only the default sentinel is static. + +export type CrushModelPick = typeof DEFAULT_SENTINEL | string; + +export function toPersistedCrushModel(pick: CrushModelPick): string | null { + return pick === DEFAULT_SENTINEL ? null : pick; +} + +export function fromPersistedCrushModel( + persisted: string | null | undefined, +): CrushModelPick { + if (persisted == null) return DEFAULT_SENTINEL; + return persisted; +} + +export function getCrushModelLabel( + persisted: string | null | undefined, +): string { + if (persisted == null) return "デフォルト"; + return persisted; +} diff --git a/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/index.ts b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/index.ts new file mode 100644 index 00000000000..43d175affe2 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ClaudeRuntimePicker/index.ts @@ -0,0 +1,29 @@ +export { AgentRuntimePicker } from "./AgentRuntimePicker"; +export { ClaudeRuntimePicker } from "./ClaudeRuntimePicker"; +export { + CLAUDE_EFFORT_SELECT_OPTIONS, + CLAUDE_MODEL_SELECT_OPTIONS, + type ClaudeEffortPick, + type ClaudeModelPick, + CODEX_EFFORT_SELECT_OPTIONS, + CODEX_MODEL_SELECT_OPTIONS, + type CodexEffortPick, + type CodexModelPick, + type CrushModelPick, + DEFAULT_SENTINEL, + fromPersistedCodexEffort, + fromPersistedCodexModel, + fromPersistedCrushModel, + fromPersistedEffort, + fromPersistedModel, + 
getClaudeEffortLabel, + getClaudeModelLabel, + getCodexEffortLabel, + getCodexModelLabel, + getCrushModelLabel, + toPersistedCodexEffort, + toPersistedCodexModel, + toPersistedCrushModel, + toPersistedEffort, + toPersistedModel, +} from "./claudeRuntimeOptions"; diff --git a/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/ScheduleFireToasts.tsx b/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/ScheduleFireToasts.tsx new file mode 100644 index 00000000000..189232d3b9f --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/ScheduleFireToasts.tsx @@ -0,0 +1,41 @@ +import { toast } from "@superset/ui/sonner"; +import { electronTrpc } from "renderer/lib/electron-trpc"; + +/** + * Subscribes to the scheduler's fire events in the main process and shows + * a toast for each one. Mounted once at the layout level so notifications + * surface regardless of whether the TodoManager dialog is open. + * + * Renders nothing. + */ +export function ScheduleFireToasts() { + const utils = electronTrpc.useUtils(); + + electronTrpc.todoAgent.schedule.onFire.useSubscription(undefined, { + onError: (err) => { + console.warn("[schedule-toasts] subscription error", err); + }, + onData: (event) => { + if (event.kind === "triggered") { + toast.success(`📅 ${event.scheduleName} を実行しました`, { + description: event.sessionId + ? "TODO Manager のタスクタブから進捗を確認できます" + : undefined, + }); + } else if (event.kind === "skipped") { + toast.info(`⏭️ ${event.scheduleName} をスキップしました`, { + description: event.message ?? undefined, + }); + } else if (event.kind === "failed") { + toast.error(`⚠️ ${event.scheduleName} の発火に失敗しました`, { + description: event.message ?? 
undefined, + }); + } + + void utils.todoAgent.schedule.listAll.invalidate(); + void utils.todoAgent.listAll.invalidate(); + }, + }); + + return null; +} diff --git a/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/index.ts b/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/index.ts new file mode 100644 index 00000000000..30835cdc560 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/ScheduleFireToasts/index.ts @@ -0,0 +1 @@ +export { ScheduleFireToasts } from "./ScheduleFireToasts"; diff --git a/apps/desktop/src/renderer/features/todo-agent/TodoButton/TodoButton.tsx b/apps/desktop/src/renderer/features/todo-agent/TodoButton/TodoButton.tsx new file mode 100644 index 00000000000..d337a7fa70c --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/TodoButton/TodoButton.tsx @@ -0,0 +1,197 @@ +import { Button } from "@superset/ui/button"; +import { cn } from "@superset/ui/utils"; +import { memo, useCallback, useMemo, useState } from "react"; +import { HiMiniListBullet } from "react-icons/hi2"; +import { electronTrpc } from "renderer/lib/electron-trpc"; +import { TodoManager } from "../TodoManager"; +import { TodoModal } from "../TodoModal"; + +interface TodoButtonProps { + projectId?: string | null; + workspaceId: string; + worktreePath?: string | null; +} + +type StatusCategory = "running" | "queued" | "failed" | "paused"; + +interface StatusBadgeConfig { + label: string; + dot: string; + badge: string; + pulse?: boolean; +} + +const STATUS_BADGE_ORDER: StatusCategory[] = [ + "running", + "queued", + "failed", + "paused", +]; + +const STATUS_BADGE_META: Record = { + running: { + label: "実行中", + dot: "bg-amber-500", + badge: "bg-amber-500/15 text-amber-600 dark:text-amber-400", + pulse: true, + }, + queued: { + label: "待機中", + dot: "bg-primary", + badge: "bg-primary/15 text-primary", + }, + failed: { + label: "失敗/要確認", + dot: "bg-rose-500", + badge: "bg-rose-500/15 text-rose-600 dark:text-rose-400", + }, + 
paused: { + label: "一時停止", + dot: "bg-muted-foreground/60", + badge: "bg-muted text-muted-foreground", + }, +}; + +/** + * Entry point for the fork-local TODO autonomous agent feature. Sits + * immediately left of the WorkspaceRunButton in PresetsBar. + * + * Clicking the button opens the Agent-Manager-style TodoManager drawer. + * Session creation lives inside the manager so users always see the + * context of what already exists before creating something new. + */ +export const TodoButton = memo(function TodoButton({ + projectId, + workspaceId, +}: TodoButtonProps) { + const [managerOpen, setManagerOpen] = useState(false); + const [modalOpen, setModalOpen] = useState(false); + + const { data: allSessions } = electronTrpc.todoAgent.listAll.useQuery( + undefined, + { refetchInterval: 3000 }, + ); + + const counts = useMemo(() => { + const acc: Record = { + running: 0, + queued: 0, + failed: 0, + paused: 0, + }; + for (const s of allSessions ?? []) { + switch (s.status) { + case "preparing": + case "running": + case "verifying": + acc.running += 1; + break; + case "queued": + case "waiting": + // `waiting` は ScheduleWakeup で一時停止中のセッション。 + // scheduler が waitingUntil 経過後に自動で queued に戻すため、 + // slot を占有している扱いとして queued と同じバッジで集計する。 + acc.queued += 1; + break; + case "failed": + case "escalated": + acc.failed += 1; + break; + case "paused": + acc.paused += 1; + break; + default: + break; + } + } + return acc; + }, [allSessions]); + + const activeCount = + counts.running + counts.queued + counts.failed + counts.paused; + + const tooltip = useMemo(() => { + const parts = STATUS_BADGE_ORDER.filter((key) => counts[key] > 0).map( + (key) => `${STATUS_BADGE_META[key].label}: ${counts[key]}`, + ); + if (parts.length === 0) return "自律 TODO Agent Manager を開く"; + return `自律 TODO Agent Manager を開く (${parts.join(" / ")})`; + }, [counts]); + + const handleRequestNewTodo = useCallback(() => { + setModalOpen(true); + }, []); + + return ( + <> + + + {/* + Rendered as a sibling of 
TodoManager rather than inside it so + the two shadcn Dialogs stack independently. The modal opens + on top of the Manager without the outer Dialog's + click-outside handlers interfering. + */} + + + ); +}); diff --git a/apps/desktop/src/renderer/features/todo-agent/TodoButton/index.ts b/apps/desktop/src/renderer/features/todo-agent/TodoButton/index.ts new file mode 100644 index 00000000000..8a8676c99f1 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/TodoButton/index.ts @@ -0,0 +1 @@ +export { TodoButton } from "./TodoButton"; diff --git a/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/ChangesSidebar.tsx b/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/ChangesSidebar.tsx new file mode 100644 index 00000000000..58f9f09c7d5 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/ChangesSidebar.tsx @@ -0,0 +1,550 @@ +import { ScrollArea } from "@superset/ui/scroll-area"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@superset/ui/tooltip"; +import { cn } from "@superset/ui/utils"; +import { useMemo, useState } from "react"; +import { + HiMiniArrowPath, + HiMiniChevronDown, + HiMiniChevronRight, +} from "react-icons/hi2"; +import { electronTrpc } from "renderer/lib/electron-trpc"; + +interface ChangesSidebarProps { + sessionId: string; + active: boolean; +} + +type DiffScope = "session" | "staged" | "unstaged" | "commit"; + +interface SelectedDiff { + key: string; + path: string; + scope: DiffScope; + commitSha?: string; + label: string; +} + +/** + * Right-side panel inside the TODO Agent Manager that surfaces the git + * work the worker produced in a session. Relies on the per-session + * `startHeadSha` the supervisor captures at run start to scope commits + * to "this session only" via `git log startHeadSha..HEAD`. 
+ */ +export function ChangesSidebar({ sessionId, active }: ChangesSidebarProps) { + const [selected, setSelected] = useState(null); + const [commitsOpen, setCommitsOpen] = useState(true); + const [workingTreeOpen, setWorkingTreeOpen] = useState(true); + const [sessionFilesOpen, setSessionFilesOpen] = useState(true); + + const snapshot = electronTrpc.todoAgent.gitSnapshot.useQuery( + { sessionId }, + { + refetchInterval: active ? 3000 : false, + staleTime: 1000, + }, + ); + + const diffQuery = electronTrpc.todoAgent.gitFileDiff.useQuery( + selected + ? { + sessionId, + path: selected.path, + scope: selected.scope, + commitSha: selected.commitSha, + } + : { sessionId, path: "", scope: "session" as const }, + { enabled: !!selected, staleTime: 5_000 }, + ); + + const utils = electronTrpc.useUtils(); + const handleRefresh = () => { + void utils.todoAgent.gitSnapshot.invalidate({ sessionId }); + if (selected) { + void utils.todoAgent.gitFileDiff.invalidate({ + sessionId, + path: selected.path, + scope: selected.scope, + commitSha: selected.commitSha, + }); + } + }; + + const data = snapshot.data; + const commits = data?.commits ?? []; + const workingTree = data?.workingTree ?? []; + const sessionFiles = data?.sessionFiles ?? []; + const startHeadUnreachable = data?.startHeadUnreachable ?? false; + + const stagedCount = useMemo( + () => workingTree.filter((f) => f.stage === "staged").length, + [workingTree], + ); + const unstagedCount = useMemo( + () => workingTree.filter((f) => f.stage === "unstaged").length, + [workingTree], + ); + const untrackedCount = useMemo( + () => workingTree.filter((f) => f.stage === "untracked").length, + [workingTree], + ); + + return ( +
+
+
+
+ 変更 +
+ {data?.branch ? ( + + +
+ {data.branch} +
+
+ + {data.branch} + +
+ ) : ( +
+ (ブランチ取得中…) +
+ )} +
+ +
+ + +
+ {data?.startHeadSha && ( +
+
+ 開始時 HEAD +
+
+ {data.startHeadSha.slice(0, 12)} + {data.currentHeadSha && + data.currentHeadSha !== data.startHeadSha ? ( + <> + {" → "} + + {data.currentHeadSha.slice(0, 12)} + + + ) : null} +
+ {(data.ahead > 0 || data.behind > 0) && ( +
+ ↑ {data.ahead} · ↓ {data.behind} +
+ )} +
+ )} + + {!data?.startHeadSha && snapshot.isSuccess && ( +
+ 開始時 HEAD が記録されていません。Start して最初のターンに入ると + このパネルに差分とコミット履歴が表示されます。 +
+ )} + + {startHeadUnreachable && ( +
+ 開始時 HEAD + のコミットが見つかりません。ブランチがリセットされたか、 + オブジェクトが失われている可能性があります。 +
+ )} + + {/* Cumulative session delta (startHeadSha ↔ HEAD), shown + even when no new commits exist so branch switches / + rebases don't leave the sidebar looking empty. */} +
+ + {sessionFilesOpen && ( +
+ {!data?.startHeadSha ? ( +

+ 開始時 HEAD が未記録のため、差分を算出できません。 +

+ ) : sessionFiles.length === 0 ? ( +

+ 開始時からの差分はありません。 +

+ ) : ( + sessionFiles.map((file) => { + const key = `session:${file.path}`; + // Deletions ARE the diff at session scope — + // `git diff ..HEAD -- ` still emits + // a valid deletion patch, so keep every entry + // clickable. The working-tree section below + // rightly disables `D`, because there the file + // is already gone from the worktree. + return ( + + + + + + + {file.path} + + + + ); + }) + )} +
+ )} +
+ + {/* Commits since session start */} +
+ + {commitsOpen && ( +
+ {commits.length === 0 ? ( +

+ このセッションでの新規コミットはありません。 +

+ ) : ( + commits.map((commit) => ( + + + + + +
+ + {commit.subject} + + + {commit.shortSha} · {commit.authorName} + {commit.authorDate + ? ` · ${formatShortDate(commit.authorDate)}` + : ""} + +
+
+
+ )) + )} +
+ )} +
+ + {/* Working tree */} +
+ + {workingTreeOpen && ( +
+ {workingTree.length === 0 ? ( +

+ ワーキングツリーは clean です。 +

+ ) : ( + workingTree.map((file) => { + const key = `wt:${file.stage}:${file.path}`; + const scope: DiffScope = + file.stage === "staged" ? "staged" : "unstaged"; + const canDiff = + file.stage !== "untracked" && file.code !== "D"; + return ( + + + + + + + {file.path} + + + + ); + }) + )} +
+ )} +
+ + {/* Diff viewer for the currently selected file/commit */} + {selected && ( +
+
+ + +
+ {selected.scope === "commit" + ? `コミット ${selected.label}` + : `${scopeLabel(selected.scope)} · ${selected.label}`} +
+
+ + + {selected.scope === "commit" + ? `コミット ${selected.label}` + : `${scopeLabel(selected.scope)} · ${selected.label}`} + + +
+ +
+ +
+ )} +
+
+
+ ); +} + +function StatusBadge({ code, stage }: { code: string; stage: string }) { + const { letter, color } = useMemo(() => { + if (stage === "untracked") { + return { letter: "?", color: "text-muted-foreground" }; + } + switch (code) { + case "M": + return { letter: "M", color: "text-amber-500" }; + case "A": + return { letter: "A", color: "text-emerald-500" }; + case "D": + return { letter: "D", color: "text-rose-500" }; + case "R": + return { letter: "R", color: "text-primary" }; + default: + return { letter: code || "·", color: "text-muted-foreground" }; + } + }, [code, stage]); + return ( + + {letter} + + ); +} + +function DiffBlock({ + content, + loading, +}: { + content: string; + loading: boolean; +}) { + if (loading && !content) { + return ( +
読み込み中…
+ ); + } + if (!content.trim()) { + return ( +
+ 差分はありません。 +
+ ); + } + const lines = content.split("\n"); + return ( +
+			
+				{lines.map((line, idx) => (
+					
+ {line || " "} +
+ ))} +
+
+ ); +} + +function formatShortDate(iso: string): string { + if (!iso) return ""; + const d = new Date(iso); + if (Number.isNaN(d.getTime())) return iso; + const pad = (n: number) => n.toString().padStart(2, "0"); + return `${pad(d.getMonth() + 1)}/${pad(d.getDate())} ${pad(d.getHours())}:${pad(d.getMinutes())}`; +} + +function scopeLabel(scope: DiffScope): string { + switch (scope) { + case "staged": + return "staged"; + case "unstaged": + return "unstaged"; + case "session": + return "セッション全体"; + case "commit": + return "commit"; + } +} diff --git a/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/index.ts b/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/index.ts new file mode 100644 index 00000000000..047b3bbef5f --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/TodoManager/ChangesSidebar/index.ts @@ -0,0 +1 @@ +export { ChangesSidebar } from "./ChangesSidebar"; diff --git a/apps/desktop/src/renderer/features/todo-agent/TodoManager/PresetsDialog/PresetsDialog.tsx b/apps/desktop/src/renderer/features/todo-agent/TodoManager/PresetsDialog/PresetsDialog.tsx new file mode 100644 index 00000000000..58ca2c7c489 --- /dev/null +++ b/apps/desktop/src/renderer/features/todo-agent/TodoManager/PresetsDialog/PresetsDialog.tsx @@ -0,0 +1,632 @@ +import type { SelectTodoPromptPreset } from "@superset/local-db"; +import { Button } from "@superset/ui/button"; +import { Dialog, DialogContent, DialogTitle } from "@superset/ui/dialog"; +import { Input } from "@superset/ui/input"; +import { Label } from "@superset/ui/label"; +import { ScrollArea } from "@superset/ui/scroll-area"; +import { toast } from "@superset/ui/sonner"; +import { Textarea } from "@superset/ui/textarea"; +import { cn } from "@superset/ui/utils"; +import { type AgentKind, DEFAULT_AGENT_KIND } from "main/todo-agent/types"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { + HiMiniCog6Tooth, + HiMiniPlus, + 
HiMiniTrash, + HiMiniXMark, +} from "react-icons/hi2"; +import { electronTrpc } from "renderer/lib/electron-trpc"; +import { + AgentRuntimePicker, + type ClaudeEffortPick, + type ClaudeModelPick, + type CodexEffortPick, + type CodexModelPick, + type CrushModelPick, + DEFAULT_SENTINEL, + fromPersistedCrushModel, + fromPersistedEffort, + fromPersistedModel, + toPersistedCodexEffort, + toPersistedCodexModel, + toPersistedCrushModel, + toPersistedEffort, + toPersistedModel, +} from "../../ClaudeRuntimePicker"; + +interface PresetsDialogProps { + open: boolean; + onOpenChange: (open: boolean) => void; +} + +/** + * Manager for reusable TODO templates (system prompts, task templates, + * goal templates). Entered from the "設定" row at the bottom of the + * Agent Manager's left sidebar. Two-pane layout: list on the left, + * edit form on the right. + */ +type Tab = "presets" | "settings"; + +type PresetKind = "system" | "description" | "goal"; + +const KIND_LABEL: Record = { + system: "システム", + description: "タスク", + goal: "ゴール", +}; + +export function PresetsDialog({ open, onOpenChange }: PresetsDialogProps) { + const [tab, setTab] = useState("presets"); + + return ( + + + Agent Manager 設定 +
+
+ + +
+ +
+ {tab === "presets" ? : } +
+
+ ); +} + +function SettingsTab() { + const { data: settings } = electronTrpc.todoAgent.settings.get.useQuery(); + const updateMut = electronTrpc.todoAgent.settings.update.useMutation(); + const utils = electronTrpc.useUtils(); + const { data: crushModelsData } = + electronTrpc.todoAgent.crushModels.useQuery(undefined); + + const [maxIter, setMaxIter] = useState(10); + const [maxMin, setMaxMin] = useState(30); + const [maxConcurrent, setMaxConcurrent] = useState(1); + const [retentionDays, setRetentionDays] = useState(0); + const [defaultModel, setDefaultModel] = + useState(DEFAULT_SENTINEL); + const [defaultEffort, setDefaultEffort] = + useState(DEFAULT_SENTINEL); + const [defaultAgentKind, setDefaultAgentKind] = + useState(DEFAULT_AGENT_KIND); + const [defaultCodexModel, setDefaultCodexModel] = + useState(DEFAULT_SENTINEL); + const [defaultCodexEffort, setDefaultCodexEffort] = + useState(DEFAULT_SENTINEL); + const [defaultCrushModel, setDefaultCrushModel] = + useState(DEFAULT_SENTINEL); + + // Hydrate form state the first time settings arrive from the main + // process. A React Query background refetch (window focus, etc.) + // re-fires the query even when no persisted data changed; without + // this guard it would silently clobber in-progress edits in the + // form, reverting the user's dirty state and erasing their changes + // the moment the window regained focus. + const hydratedRef = useRef(false); + useEffect(() => { + if (!settings) return; + if (hydratedRef.current) return; + setMaxIter(settings.defaultMaxIterations); + setMaxMin(settings.defaultMaxWallClockMin); + setMaxConcurrent(settings.maxConcurrentTasks); + setRetentionDays(settings.sessionRetentionDays); + setDefaultModel(fromPersistedModel(settings.defaultClaudeModel ?? null)); + setDefaultEffort(fromPersistedEffort(settings.defaultClaudeEffort ?? null)); + setDefaultCrushModel( + fromPersistedCrushModel(settings.defaultCrushModel ?? 
null), + ); + hydratedRef.current = true; + }, [settings]); + + const dirty = + settings != null && + (maxIter !== settings.defaultMaxIterations || + maxMin !== settings.defaultMaxWallClockMin || + maxConcurrent !== settings.maxConcurrentTasks || + retentionDays !== settings.sessionRetentionDays || + toPersistedModel(defaultModel) !== + (settings.defaultClaudeModel ?? null) || + toPersistedEffort(defaultEffort) !== + (settings.defaultClaudeEffort ?? null) || + defaultAgentKind !== (settings.defaultAgentKind ?? DEFAULT_AGENT_KIND) || + toPersistedCodexModel(defaultCodexModel) !== + (settings.defaultCodexModel ?? null) || + toPersistedCodexEffort(defaultCodexEffort) !== + (settings.defaultCodexEffort ?? null) || + toPersistedCrushModel(defaultCrushModel) !== + (settings.defaultCrushModel ?? null)); + + const handleSave = useCallback(async () => { + try { + await updateMut.mutateAsync({ + defaultMaxIterations: maxIter, + defaultMaxWallClockMin: maxMin, + maxConcurrentTasks: maxConcurrent, + sessionRetentionDays: retentionDays, + defaultClaudeModel: toPersistedModel(defaultModel), + defaultClaudeEffort: toPersistedEffort(defaultEffort), + defaultAgentKind, + defaultCodexModel: toPersistedCodexModel(defaultCodexModel), + defaultCodexEffort: toPersistedCodexEffort(defaultCodexEffort), + defaultCrushModel: toPersistedCrushModel(defaultCrushModel), + }); + await utils.todoAgent.settings.get.invalidate(); + toast.success("設定を保存しました"); + } catch (error) { + toast.error( + error instanceof Error ? error.message : "保存に失敗しました", + ); + } + }, [ + defaultAgentKind, + defaultCodexEffort, + defaultCodexModel, + defaultCrushModel, + defaultEffort, + defaultModel, + maxIter, + maxMin, + maxConcurrent, + retentionDays, + updateMut, + utils, + ]); + + return ( +
+
+
+ + setMaxIter(Number(e.target.value) || 1)} + className="w-32" + /> +

+ 新規 TODO 作成時のデフォルト値。各セッションで個別に変更可。 +

+
+
+ + setMaxMin(Number(e.target.value) || 1)} + className="w-32" + /> +

+ 壁時計上限。この時間を超えるとセッションはエスカレートされる。 +

+
+
+ + setMaxConcurrent(Number(e.target.value) || 1)} + className="w-32" + /> +

+ 同時に実行する TODO セッションの上限。超えた分はキューで待機。 +

+
+
+ + + setRetentionDays(Math.max(0, Number(e.target.value) || 0)) + } + className="w-32" + /> +

+ この日数より古い終了済みセッション (done / failed / aborted / + escalated) をアプリ起動時に自動削除する。0 + で無効(手動削除のみ)。実行中・キュー中のセッションは対象外。 +

+
+
+ + +

+ 新規に作る TODO + やスケジュールのフォームに初期値として反映される。個別に上書き可。既存の + TODO / スケジュールには影響しない。 +

+
+
+ +
+
+
+ ); +} + +function PresetsTab({ open }: { open: boolean }) { + const utils = electronTrpc.useUtils(); + const { data: presets } = electronTrpc.todoAgent.presets.list.useQuery( + undefined, + { enabled: open }, + ); + + const { data: projects } = electronTrpc.projects.getRecents.useQuery(); + + const [selectedId, setSelectedId] = useState(null); + const [draft, setDraft] = useState<{ + id: string | null; + name: string; + content: string; + kind: PresetKind; + workspaceId: string | null; + }>({ + id: null, + name: "", + content: "", + kind: "system", + workspaceId: null, + }); + const [confirmingDelete, setConfirmingDelete] = useState(false); + + const createMut = electronTrpc.todoAgent.presets.create.useMutation(); + const updateMut = electronTrpc.todoAgent.presets.update.useMutation(); + const deleteMut = electronTrpc.todoAgent.presets.delete.useMutation(); + + const invalidate = useCallback( + () => utils.todoAgent.presets.list.invalidate(), + [utils], + ); + + const selected = useMemo( + () => + (presets ?? []).find( + (p: SelectTodoPromptPreset) => p.id === selectedId, + ) ?? null, + [presets, selectedId], + ); + + // Sync draft with selection changes. + useEffect(() => { + if (selected) { + setDraft({ + id: selected.id, + name: selected.name, + content: selected.content, + kind: selected.kind ?? "system", + workspaceId: selected.workspaceId ?? null, + }); + } else { + setDraft({ + id: null, + name: "", + content: "", + kind: "system", + workspaceId: null, + }); + } + setConfirmingDelete(false); + }, [selected]); + + const dirty = + !!draft.name.trim() && + !!draft.content.trim() && + (!selected || + draft.name !== selected.name || + draft.content !== selected.content || + draft.kind !== (selected.kind ?? "system") || + draft.workspaceId !== (selected.workspaceId ?? 
null)); + + const handleNew = useCallback(() => { + setSelectedId(null); + setDraft({ + id: null, + name: "", + content: "", + kind: "system", + workspaceId: null, + }); + }, []); + + const handleSave = useCallback(async () => { + try { + if (draft.id) { + const row = await updateMut.mutateAsync({ + id: draft.id, + name: draft.name.trim(), + content: draft.content.trim(), + kind: draft.kind, + workspaceId: draft.workspaceId, + }); + setSelectedId(row.id); + toast.success("テンプレートを更新しました"); + } else { + const row = await createMut.mutateAsync({ + name: draft.name.trim(), + content: draft.content.trim(), + kind: draft.kind, + workspaceId: draft.workspaceId ?? undefined, + }); + setSelectedId(row.id); + toast.success("テンプレートを作成しました"); + } + await invalidate(); + } catch (error) { + toast.error( + error instanceof Error ? error.message : "保存に失敗しました", + ); + } + }, [createMut, draft, invalidate, updateMut]); + + const handleDelete = useCallback(async () => { + if (!draft.id) return; + try { + await deleteMut.mutateAsync({ id: draft.id }); + await invalidate(); + setSelectedId(null); + setConfirmingDelete(false); + toast.success("テンプレートを削除しました"); + } catch (error) { + toast.error( + error instanceof Error ? error.message : "削除に失敗しました", + ); + } + }, [deleteMut, draft.id, invalidate]); + + return ( +
+
+
+ +
+ +
+ {(presets ?? []).length === 0 && ( +

+ まだテンプレートはありません。右上から新規作成してください。 +

+ )} + {(presets ?? []).map((preset: SelectTodoPromptPreset) => { + const kind = preset.kind ?? "system"; + return ( + + ); + })} +
+
+
+ +
+
+
+ + +
+
+ + +
+
+
+ + setDraft((d) => ({ ...d, name: e.target.value }))} + placeholder="例: 日本語で返答" + maxLength={120} + className="rounded-md" + /> +
+
+ +