diff --git a/docs/getting_started_with_claude_code.md b/docs/getting_started_with_claude_code.md
index a7b018bf0..ff6532cff 100644
--- a/docs/getting_started_with_claude_code.md
+++ b/docs/getting_started_with_claude_code.md
@@ -277,6 +277,9 @@ Confirm `~/wren-workspace/target/mdl.json` exists before starting the container.
**Database connection refused inside Docker:**
Change `localhost` / `127.0.0.1` to `host.docker.internal` in your connection credentials.
+**MCP tools fail with "Session not found" after container restart:**
+Start a new Claude Code session (or restart your MCP client). Container restarts invalidate MCP sessions — the client must reconnect.
+
**`wren-generate-mdl` fails because wren-ibis-server is not running:**
Start the container first (Phase 2), then run `/wren-generate-mdl`. wren-ibis-server is available at `http://localhost:8000` once the container is up.
diff --git a/mcp-server/app/templates/_fields.html b/mcp-server/app/templates/_fields.html
index af1de42b8..24fca7e7a 100644
--- a/mcp-server/app/templates/_fields.html
+++ b/mcp-server/app/templates/_fields.html
@@ -3,6 +3,22 @@
{% for field in fields %}
{% if field.type == 'hidden' %}
+ {% elif field.type == 'file_base64' %}
+
+ {{ field.label }}
+
+
+ {% if connection_info.get(field.name) %}
+ ✓ Credentials already configured (upload new file to replace)
+ {% else %}
+
+ {% endif %}
+ {% if field.get('hint') %}
+ {{ field.hint }}
+ {% endif %}
+
{% else %}
{{ field.label }}
@@ -21,6 +37,29 @@
{% endif %}
{% endfor %}
+
{% elif datasource %}
Connection JSON
diff --git a/mcp-server/app/web.py b/mcp-server/app/web.py
index a6001164c..e615bcdd2 100644
--- a/mcp-server/app/web.py
+++ b/mcp-server/app/web.py
@@ -42,9 +42,9 @@
{"name": "format", "label": "Format", "type": "hidden", "value": "duckdb"},
],
"BIGQUERY": [
- {"name": "project", "label": "Project ID", "type": "text", "placeholder": "my-gcp-project"},
- {"name": "dataset", "label": "Dataset", "type": "text", "placeholder": "my_dataset"},
- {"name": "credentials_base64", "label": "Credentials (Base64)", "type": "password", "placeholder": ""},
+ {"name": "project_id", "label": "Project ID", "type": "text", "placeholder": "my-gcp-project"},
+ {"name": "dataset_id", "label": "Dataset", "type": "text", "placeholder": "my_dataset"},
+ {"name": "credentials", "label": "Service Account JSON", "type": "file_base64", "accept": ".json", "hint": "Upload your GCP service account credentials.json file. It will be base64-encoded automatically."},
],
"SNOWFLAKE": [
{"name": "user", "label": "User", "type": "text", "placeholder": ""},
@@ -131,6 +131,11 @@
{"name": "secret_key", "label": "Secret Key", "type": "password", "placeholder": ""},
{"name": "credentials", "label": "Credentials (Base64)", "type": "password", "placeholder": "eyJ..."},
],
+ "DATABRICKS": [
+ {"name": "serverHostname", "label": "Server Hostname", "type": "text", "placeholder": "dbc-xxxxxxxx-xxxx.cloud.databricks.com"},
+ {"name": "httpPath", "label": "HTTP Path", "type": "text", "placeholder": "/sql/1.0/warehouses/xxxxxxxx"},
+ {"name": "accessToken", "label": "Access Token", "type": "password", "placeholder": ""},
+ ],
}
# Callbacks injected by wren.py via init()
@@ -213,6 +218,13 @@ async def post_connection(request: Request):
return HTMLResponse(_msg("✗ Please select a data source.", ok=False))
state = _get_state()
+
+ # Merge with existing connection info so that omitted sensitive fields
+ # (e.g. credentials not re-uploaded) retain their saved values.
+ existing = state.get("connection_info") or {}
+ if existing:
+ merged = {**existing, **conn_info}
+ conn_info = merged
mdl_ds = (state.get("data_source") or "").upper()
if state.get("is_deployed") and mdl_ds and mdl_ds != ds:
return HTMLResponse(
diff --git a/mcp-server/app/wren.py b/mcp-server/app/wren.py
index 3962e2345..7954f636e 100644
--- a/mcp-server/app/wren.py
+++ b/mcp-server/app/wren.py
@@ -146,13 +146,18 @@ def _save_settings() -> None:
_load_settings()
if mdl_path:
- with open(mdl_path) as f:
- mdl_schema = json.load(f)
- data_source = mdl_schema["dataSource"].lower()
- mdl_cache.set_mdl(dict_to_base64_string(mdl_schema))
- models = mdl_schema.get("models", [])
- total_columns = sum(len(m.get("columns", [])) for m in models)
- print(f"Loaded MDL {f.name} ({len(models)} models, {total_columns} columns)") # noqa: T201
+ try:
+ with open(mdl_path) as f:
+ mdl_schema = json.load(f)
+ data_source = mdl_schema.get("dataSource", "").lower() or None
+ mdl_cache.set_mdl(dict_to_base64_string(mdl_schema))
+ models = mdl_schema.get("models", [])
+ total_columns = sum(len(m.get("columns", [])) for m in models)
+ print(f"Loaded MDL {f.name} ({len(models)} models, {total_columns} columns)") # noqa: T201
+ except FileNotFoundError:
+ print(f"MDL file not found at {mdl_path} — starting without a loaded MDL") # noqa: T201
+ except (json.JSONDecodeError, KeyError) as e:
+ print(f"Failed to parse MDL file {mdl_path}: {e} — starting without a loaded MDL") # noqa: T201
else:
print("No MDL_PATH environment variable found")
diff --git a/skills/index.json b/skills/index.json
index 499019ad8..4f1ca26c0 100644
--- a/skills/index.json
+++ b/skills/index.json
@@ -68,7 +68,7 @@
},
{
"name": "wren-mcp-setup",
- "version": "1.3",
+ "version": "1.4",
"description": "Set up Wren Engine MCP server via Docker and register it with an AI agent.",
"tags": [
"wren",
diff --git a/skills/versions.json b/skills/versions.json
index 02536239f..67b174e04 100644
--- a/skills/versions.json
+++ b/skills/versions.json
@@ -3,7 +3,7 @@
"wren-connection-info": "1.5",
"wren-project": "1.5",
"wren-sql": "1.0",
- "wren-mcp-setup": "1.3",
+ "wren-mcp-setup": "1.4",
"wren-quickstart": "1.3",
"wren-http-api": "1.0",
"wren-usage": "1.2"
diff --git a/skills/wren-connection-info/SKILL.md b/skills/wren-connection-info/SKILL.md
index f8ba45440..abbdb6a82 100644
--- a/skills/wren-connection-info/SKILL.md
+++ b/skills/wren-connection-info/SKILL.md
@@ -45,10 +45,10 @@ Read the linked reference file for the user's data source to get required fields
Most database connectors need: `host`, `port`, `user`, `password`, `database`.
Exceptions:
-- **BigQuery** — uses `project_id`, `dataset_id`, `credentials_json_string` (base64-encoded). See [databases.md](references/databases.md) for encoding instructions.
-- **Snowflake** — uses `account` instead of `host`, plus `sf_schema`.
+- **BigQuery** — uses `project_id`, `dataset_id`, `credentials` (base64-encoded). See [databases.md](references/databases.md) for encoding instructions.
+- **Snowflake** — uses `account` instead of `host`, plus `schema`.
- **Trino** — needs `catalog` and `schema` instead of `database`.
-- **Databricks** — uses `server_hostname`, `http_path`, `access_token` (or service principal).
+- **Databricks** — uses `serverHostname`, `httpPath`, `accessToken` (or service principal with `clientId`, `clientSecret`).
- **Spark** — only `host` and `port` (Spark Connect protocol, no auth fields).
- **File sources** — use `url`, `format`, plus bucket/credentials. See [file-sources.md](references/file-sources.md).
@@ -73,11 +73,11 @@ Never log, display, or pass sensitive values through the AI agent unnecessarily.
| Connector | Sensitive fields |
|-----------|-----------------|
| Postgres / MySQL / MSSQL / ClickHouse / Oracle / Doris / Redshift | `password` |
-| BigQuery | `credentials_json_string` |
+| BigQuery | `credentials` |
| Snowflake | `password` |
| Athena | `aws_access_key_id`, `aws_secret_access_key` |
-| Databricks (token) | `access_token` |
-| Databricks (service principal) | `client_id`, `client_secret` |
+| Databricks (token) | `accessToken` |
+| Databricks (service principal) | `clientId`, `clientSecret` |
| S3 / MinIO | `access_key`, `secret_key` |
| GCS | `key_id`, `secret_key`, `credentials` |
| Trino / Spark / Local files | (none) |
diff --git a/skills/wren-connection-info/references/databases.md b/skills/wren-connection-info/references/databases.md
index 2872525a6..9bb9921e5 100644
--- a/skills/wren-connection-info/references/databases.md
+++ b/skills/wren-connection-info/references/databases.md
@@ -57,7 +57,7 @@ Set `redshift_type` to `"redshift_iam"` in the connection info to use IAM auth.
|-------|-------------|-----------|
| `project_id` | GCP project ID | |
| `dataset_id` | Dataset name | |
-| `credentials_json_string` | Base64-encoded service account JSON | ✓ |
+| `credentials` | Base64-encoded service account JSON | ✓ |
**BigQuery credentials encoding**: Wren requires the service account JSON as a **base64-encoded string**, not the raw file.
After downloading `credentials.json` from GCP, run:
@@ -69,7 +69,7 @@ base64 -i credentials.json | tr -d '\n'
base64 -w 0 credentials.json
```
-Paste the output into the `credentials_json_string` field.
+Paste the output into the `credentials` field. The Web UI also supports uploading the JSON file directly; it will be base64-encoded automatically.
---
@@ -81,7 +81,7 @@ Paste the output into the `credentials_json_string` field.
| `password` | Password | ✓ |
| `account` | Account identifier | |
| `database` | Database name | |
-| `sf_schema` | Schema name | |
+| `schema` | Schema name | |
---
@@ -101,7 +101,7 @@ Paste the output into the `credentials_json_string` field.
| Field | Description | Sensitive |
|-------|-------------|-----------|
| `s3_staging_dir` | S3 staging directory (`s3://bucket/prefix/`) | |
-| `region` | AWS region | |
+| `region_name` | AWS region | |
| `aws_access_key_id` | AWS access key ID | ✓ |
| `aws_secret_access_key` | AWS secret access key | ✓ |
@@ -111,19 +111,19 @@ Paste the output into the `credentials_json_string` field.
| Field | Description | Sensitive |
|-------|-------------|-----------|
-| `server_hostname` | Workspace hostname (e.g. `dbc-xxx.cloud.databricks.com`) | |
-| `http_path` | SQL warehouse HTTP path (e.g. `/sql/1.0/warehouses/xxx`) | |
-| `access_token` | Personal access token | ✓ |
+| `serverHostname` | Workspace hostname (e.g. `dbc-xxx.cloud.databricks.com`) | |
+| `httpPath` | SQL warehouse HTTP path (e.g. `/sql/1.0/warehouses/xxx`) | |
+| `accessToken` | Personal access token | ✓ |
## Databricks (service principal)
| Field | Description | Sensitive |
|-------|-------------|-----------|
-| `server_hostname` | Workspace hostname | |
-| `http_path` | SQL warehouse HTTP path | |
-| `client_id` | OAuth M2M client ID | ✓ |
-| `client_secret` | OAuth M2M client secret | ✓ |
-| `azure_tenant_id` | Azure AD tenant ID (Azure Databricks only) | |
+| `serverHostname` | Workspace hostname | |
+| `httpPath` | SQL warehouse HTTP path | |
+| `clientId` | OAuth M2M client ID | ✓ |
+| `clientSecret` | OAuth M2M client secret | ✓ |
+| `azureTenantId` | Azure AD tenant ID (Azure Databricks only) | |
---
diff --git a/skills/wren-mcp-setup/SKILL.md b/skills/wren-mcp-setup/SKILL.md
index 7c0798a45..1d525a47d 100644
--- a/skills/wren-mcp-setup/SKILL.md
+++ b/skills/wren-mcp-setup/SKILL.md
@@ -5,7 +5,7 @@ compatibility: Requires Docker Desktop (or Docker Engine).
license: Apache-2.0
metadata:
author: wren-engine
- version: "1.3"
+ version: "1.4"
---
# Set Up Wren MCP via Docker
@@ -132,7 +132,7 @@ docker run -d \
ghcr.io/canner/wren-engine-ibis:latest
```
-> If `MDL_PATH` is not set (or the file doesn't exist yet), the container starts without a loaded MDL. You can deploy later using the `deploy` MCP tool or the Web UI.
+> If `MDL_PATH` is not set or the file doesn't exist yet, the container starts without a loaded MDL. You can deploy later using the `deploy` MCP tool or the Web UI.
This starts the container using the image `ghcr.io/canner/wren-engine-ibis:latest` with:
@@ -316,7 +316,11 @@ claude mcp add --transport http wren http://localhost:19000/mcp
```
A `405 Method Not Allowed` response means the endpoint is reachable but expects a POST — that is normal and indicates the MCP server is up.
-### 4. Database connection refused inside the container
+### 4. MCP tools fail with "Session not found" after container restart
+
+Container restarts invalidate all active MCP sessions. The AI client still holds the old session ID, so every MCP call returns `"Session not found"`. **Start a new Claude Code session** (or restart your MCP client) to reconnect.
+
+### 5. Database connection refused inside the container
If `health_check()` passes but queries fail with a connection error, the database host is likely still set to `localhost`. Open the Web UI at `http://localhost:9001`, edit the connection info, and change the host to `host.docker.internal`.