Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,20 @@ jarvis --http 7070 --code-mode

Can also be toggled in the macOS app under **Settings**.

## Compressed Response Handling

Jarvis supports gzip and zstd compressed responses from HTTP/SSE MCP servers out of the box. The `zstandard` library is bundled so that httpx can natively decode `Content-Encoding: zstd` responses without extra configuration.

## Enterprise networks (Zscaler, corporate proxies)

Jarvis uses [truststore](https://github.com/sethmlarson/truststore) to automatically trust certificates from the OS trust store (macOS Keychain, Windows cert store). This means corporate proxy CA certificates added to your system are picked up without extra configuration.

The build scripts also set `UV_NATIVE_TLS=1` so that `uv` uses the OS trust store when fetching packages from PyPI. If you're running `uv` commands manually and hitting SSL errors, set this in your shell:

```bash
export UV_NATIVE_TLS=1
```

## CLI reference

```
Expand Down
5 changes: 5 additions & 0 deletions macOs/Jarvis/Jarvis/Services/ProcessManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,11 @@ class ProcessManager: ObservableObject {
if !FileManager.default.fileExists(atPath: url.path) {
FileManager.default.createFile(atPath: url.path, contents: nil)
}
// Truncate once at startup so stale content from previous runs is cleared.
if let handle = try? FileHandle(forWritingTo: url) {
handle.truncateFile(atOffset: 0)
handle.closeFile()
}
}

private func logHandle(for url: URL) -> FileHandle? {
Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ dependencies = [
"mcp>=1.0",
"py-key-value-aio[disk]",
"textual>=0.83",
"truststore>=0.9",
"zstandard>=0.23",
]

[dependency-groups]
Expand Down
4 changes: 4 additions & 0 deletions scripts/build_jarvis_binary.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
#!/usr/bin/env bash
# Build script for the jarvis binary.
# -e: abort on any command failure, -u: error on unset variables,
# -o pipefail: a pipeline fails if any stage fails.
set -euo pipefail

# Use the OS trust store (macOS Keychain / Windows cert store) so uv can
# reach PyPI behind enterprise proxies like Zscaler.
export UV_NATIVE_TLS=1

# Fail fast with an actionable message if uv is not on PATH.
command -v uv >/dev/null 2>&1 || { echo "ERROR: uv is not installed. See https://docs.astral.sh/uv/"; exit 1; }

# Absolute path to the repository root (the parent of this script's directory).
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
Expand Down
3 changes: 3 additions & 0 deletions scripts/build_jarvis_binary_linux.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
#!/usr/bin/env bash
# Linux build script for the jarvis binary.
# -e: abort on any command failure, -u: error on unset variables,
# -o pipefail: a pipeline fails if any stage fails.
set -euo pipefail

# Use the OS trust store so uv can reach PyPI behind enterprise proxies.
export UV_NATIVE_TLS=1

# Fail fast with an actionable message if uv is not on PATH.
command -v uv >/dev/null 2>&1 || { echo "ERROR: uv is not installed. See https://docs.astral.sh/uv/"; exit 1; }

# Absolute path to the repository root (the parent of this script's directory).
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
Expand Down
57 changes: 54 additions & 3 deletions src/jarvis/__main__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,17 @@
import asyncio
# Inject the system trust store (macOS Keychain, Windows cert store, etc.)
# so that corporate proxies like Zscaler work out of the box.
# This MUST run before any other imports that might create SSL contexts.
import sys

try:
import truststore

truststore.inject_into_ssl()
except ImportError:
pass

import asyncio
import logging
from contextlib import asynccontextmanager
from pathlib import Path

Expand All @@ -18,6 +30,30 @@
from jarvis.middleware import AuthErrorMiddleware
from jarvis.api import start_api_thread

# ── Logging setup ─────────────────────────────────────────────────────────────
# The macOS app captures stderr → ~/.jarvis/jarvis.log, so all logging goes
# to stderr. Rich is already installed (FastMCP dependency) and gives us
# pretty, timestamped output that matches the existing log format.

log = logging.getLogger("jarvis")
log.setLevel(logging.INFO)

if not log.handlers:
try:
from rich.logging import RichHandler

handler = RichHandler(
show_path=True,
rich_tracebacks=True,
tracebacks_show_locals=False,
)
except ImportError:
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)-8s %(name)s: %(message)s")
)
log.addHandler(handler)


# Priority: --config flag > active preset in presets.json > ~/.jarvis/servers.json
config_path = active_config_from_presets()
Expand Down Expand Up @@ -95,8 +131,18 @@ def build_mcp(cfg_path: Path, name: str) -> FastMCP:
disabled = get_disabled_tools(cfg_path)
cfg = MCPConfig.model_validate(mcp_dict)
configure_servers(cfg)

server_names = list(cfg.mcpServers.keys())
log.info(
"Loading config %s — %d server(s): %s",
cfg_path,
len(server_names),
", ".join(server_names) or "(none)",
)

m = build_proxy(cfg, name)
if disabled:
log.info("Disabled tools: %s", ", ".join(sorted(disabled)))
m.disable(names=disabled)
m.add_middleware(AuthErrorMiddleware(raw_servers))
m.add_transform(CodeMode() if code_mode else BM25SearchTransform(max_results=5))
Expand Down Expand Up @@ -133,6 +179,7 @@ def build_mcp(cfg_path: Path, name: str) -> FastMCP:
# sessions — no session disruption, no reconnect required.

initial_inner = build_mcp(config_path, "jarvis-proxy")
log.info("Starting HTTP mode — MCP on :%d, API on :%d", port, port + 1)
outer_mcp = FastMCP("jarvis")
swappable_provider = FastMCPProvider(initial_inner)
outer_mcp.add_provider(swappable_provider)
Expand Down Expand Up @@ -215,9 +262,10 @@ async def on_config_reload() -> None:
try:
new_inner = build_mcp(new_cfg, "jarvis-proxy")
except Exception as exc:
print(f"Warning: could not reload config: {exc}", file=sys.stderr)
log.error("Config reload failed: %s", exc)
return
swappable_provider.server = new_inner
log.info("Config reloaded from %s", new_cfg)
await broadcast_tools_changed()

async def on_tool_toggle(server: str, tool: str, enabled: bool) -> None:
Expand All @@ -230,8 +278,10 @@ async def on_tool_toggle(server: str, tool: str, enabled: bool) -> None:
names = {f"{server}_{tool}"}
if enabled:
inner.enable(names=names)
log.info("Enabled tool %s_%s", server, tool)
else:
inner.disable(names=names)
log.info("Disabled tool %s_%s", server, tool)
await broadcast_tools_changed()

def config_reload_cb() -> None:
Expand All @@ -255,12 +305,13 @@ def tool_toggle_cb(server: str, tool: str, enabled: bool) -> None:
timeout_graceful_shutdown=2,
lifespan="on",
ws="websockets-sansio",
log_level="error",
log_level="info",
)
await uvicorn.Server(cfg).serve()

asyncio.run(run_http())

else:
mcp = build_mcp(config_path, "jarvis")
log.info("Starting stdio mode")
mcp.run(show_banner=False)
8 changes: 8 additions & 0 deletions src/jarvis/middleware.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,15 @@
from __future__ import annotations

import asyncio
import logging

import mcp.types as mt
from fastmcp.exceptions import ToolError
from fastmcp.server.middleware import Middleware, MiddlewareContext
from fastmcp.tools.base import ToolResult

log = logging.getLogger("jarvis.middleware")

AUTH_MARKERS = ("401", "unauthorized")
REFRESH_TIMEOUT = 5.0 # seconds; silent refresh via refresh_token should be instant

Expand Down Expand Up @@ -78,8 +81,12 @@ async def on_call_tool(
raise

if srv_config.get("auth") == "oauth":
log.info(
"Auth error on '%s', attempting OAuth token refresh", server_name
)
refreshed = await self.try_refresh(server_name, srv_config)
if refreshed:
log.info("OAuth token refreshed for '%s'", server_name)
raise ToolError(
f"{error_text}\n\n"
f"The OAuth token for '{server_name}' has been refreshed. "
Expand All @@ -93,6 +100,7 @@ async def on_call_tool(
) from exc

# Non-OAuth server (e.g. stdio with GITLAB_TOKEN env var).
log.warning("Auth error on non-OAuth server '%s'", server_name)
raise ToolError(
f"{error_text}\n\n"
f"Authentication failed for '{server_name}'. "
Expand Down
29 changes: 27 additions & 2 deletions src/jarvis/proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@

from __future__ import annotations

import logging

from fastmcp.mcp_config import MCPConfig, StdioMCPServer
from fastmcp.server import FastMCP
from fastmcp.server.providers.proxy import (
Expand All @@ -15,6 +17,14 @@
StatefulProxyClient,
)

log = logging.getLogger("jarvis.proxy")

# Timeout (seconds) for a backend MCP server to complete its initial
# handshake. Without this, a single unreachable backend (e.g. SSL
# handshake hanging behind a corporate proxy) blocks the entire
# tools/list response indefinitely.
BACKEND_INIT_TIMEOUT = 10


def build_proxy(config: MCPConfig, name: str = "jarvis") -> FastMCP:
"""Build a FastMCP proxy server from an MCPConfig.
Expand All @@ -26,6 +36,9 @@ def build_proxy(config: MCPConfig, name: str = "jarvis") -> FastMCP:
- HTTP/SSE servers get a ``ProxyClient`` with ``new`` as the factory,
giving a fresh connection per request (stateless, correct for HTTP).

All clients are given an ``init_timeout`` so that unreachable backends
fail fast instead of blocking the entire tools/list response.

Args:
config: Validated MCPConfig with servers already configured
(OAuth injected, env vars expanded).
Expand All @@ -44,11 +57,23 @@ def build_proxy(config: MCPConfig, name: str = "jarvis") -> FastMCP:
transport = server.to_transport()

if isinstance(server, StdioMCPServer):
client = StatefulProxyClient(transport)
log.info(
"Backend %-20s stdio (stateful, timeout=%ds)",
server_name,
BACKEND_INIT_TIMEOUT,
)
client = StatefulProxyClient(transport, init_timeout=BACKEND_INIT_TIMEOUT)
mcp._stateful_clients.append(client)
factory = client.new_stateful
else:
client = ProxyClient(transport)
url = getattr(server, "url", "?")
log.info(
"Backend %-20s http %s (timeout=%ds)",
server_name,
url,
BACKEND_INIT_TIMEOUT,
)
client = ProxyClient(transport, init_timeout=BACKEND_INIT_TIMEOUT)
factory = client.new

mcp.add_provider(ProxyProvider(factory), namespace=server_name)
Expand Down
10 changes: 10 additions & 0 deletions tests/unit/test_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,3 +105,13 @@ def capturing_add(self, provider, *, namespace=""):
named = [ns for ns in added if ns]
assert len(named) == 2
assert set(named) == {"gl", "remote"}


def test_zstandard_decoder_available():
    """Verify that httpx can decode zstd responses (zstandard is installed).

    ``zstandard>=0.23`` is a hard runtime dependency (see pyproject.toml),
    so both imports below must succeed. The previous version swallowed
    ImportError, which let the test silently pass when zstd support was
    actually broken — exactly the failure it exists to catch.
    """
    # Hard dependency: an ImportError here should FAIL the test, not skip it.
    import zstandard  # noqa: F401

    # httpx registers its "zstd" decoder only when `zstandard` is importable.
    # NOTE(review): `httpx._decoders` is private API — if httpx relocates
    # SUPPORTED_DECODERS this import error will now surface loudly instead
    # of being masked, and the test should be updated to the new location.
    from httpx._decoders import SUPPORTED_DECODERS

    assert "zstd" in SUPPORTED_DECODERS
Comment on lines +110 to +117
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🌐 Web query:

httpx SUPPORTED_DECODERS public API zstd

💡 Result:

In the httpx library, SUPPORTED_DECODERS is an internal dictionary defined in httpx/_decoders.py that maps Content-Encoding values to decoder classes. It includes "identity", "gzip", "deflate", and conditionally "br" (brotli) and "zstd" (Zstandard). The "zstd" entry is added by default but removed if the 'zstandard' package is not importable. This is not part of the public API. The public API automatically handles Content-Encoding decompression for supported formats when accessing response.content or response.text, without exposing SUPPORTED_DECODERS directly. Official docs do not list SUPPORTED_DECODERS as public. zstd support was added in httpx 0.27.1 (August 2024) via merged PR #3139. Install with pip install 'httpx[zstd]' to include the zstandard dependency. Without it, servers using zstd Content-Encoding will fail decoding (gracefully skipped, but content remains compressed). Example from source: SUPPORTED_DECODERS = { "identity": IdentityDecoder, "gzip": GZipDecoder, "deflate": DeflateDecoder, "br": BrotliDecoder, "zstd": ZStandardDecoder, } if zstandard is None: SUPPORTED_DECODERS.pop("zstd")

Citations:


🏁 Script executed:

# First, locate and examine the test file
fd -t f "test_proxy.py" tests/

Repository: ArtemisMucaj/jarvis

Length of output: 87


🏁 Script executed:

# Check the actual content at lines 110-117
cat -n tests/unit/test_proxy.py | sed -n '100,125p'

Repository: ArtemisMucaj/jarvis

Length of output: 780


🏁 Script executed:

# Check the project configuration for zstandard as a dependency
fd -E "*.egg-info" "(setup.py|setup.cfg|pyproject.toml|requirements.*)" | head -20

Repository: ArtemisMucaj/jarvis

Length of output: 77


🏁 Script executed:

# Read the pyproject.toml to check zstandard dependency status
cat pyproject.toml

Repository: ArtemisMucaj/jarvis

Length of output: 701


🏁 Script executed:

# Also check if there's any special handling or setup for zstandard in conftest or elsewhere
rg "zstandard|zstd" tests/ -i

Repository: ArtemisMucaj/jarvis

Length of output: 301


🏁 Script executed:

# Check if httpx is in the dependencies and what versions are specified
rg "httpx|fastmcp" pyproject.toml

Repository: ArtemisMucaj/jarvis

Length of output: 95


🏁 Script executed:

# Let's also verify: does pytest.importorskip exist and work with packages like zstandard?
rg "importorskip" tests/

Repository: ArtemisMucaj/jarvis

Length of output: 45


Test silently passes when import fails — defeats its purpose.

Swallowing ImportError means that if httpx relocates SUPPORTED_DECODERS (private _decoders module), the test passes without asserting zstd availability. Since zstandard>=0.23 is a hard runtime dependency, the check should be unconditional and explicit. Use pytest.importorskip("zstandard") to make the requirement explicit, then assert unconditionally.

Additionally, this test taps httpx's private _decoders module—consider asserting behavior via httpx's public API (e.g., decoding an actual zstd-encoded response) for better stability.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@tests/unit/test_proxy.py` around lines 110 - 117, The test
test_zstandard_decoder_available currently swallows ImportError and can silently
pass; change it to explicitly require zstandard by calling
pytest.importorskip("zstandard") at the top of the test, then unconditionally
import httpx._decoders and assert "zstd" in SUPPORTED_DECODERS (or better:
exercise httpx public API by decoding a real zstd-encoded response) so the
assertion fails if zstd support is missing; reference the existing symbols
test_zstandard_decoder_available and SUPPORTED_DECODERS and use
pytest.importorskip("zstandard") to enforce the dependency.

Loading
Loading