diff --git a/.github/actions/run-claude/action.yml b/.github/actions/run-claude/action.yml new file mode 100644 index 0000000000..fff6788a63 --- /dev/null +++ b/.github/actions/run-claude/action.yml @@ -0,0 +1,95 @@ +# Composite Action for running Claude Code Action +# +# Wraps anthropics/claude-code-action with MCP server configuration. +# Template based on elastic/ai-github-actions base action. +# +# Usage: +# - uses: ./.github/actions/run-claude +# with: +# prompt: "Your prompt here" +# claude-oauth-token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} +# github-token: ${{ steps.marvin-token.outputs.token }} +# allowed-tools: "Edit,Read,Write,Bash(*),mcp__github__add_issue_comment" +# +name: "Run Claude" +description: "Run Claude Code with MCP servers" +author: "FastMCP" + +branding: + icon: "cpu" + color: "orange" + +inputs: + prompt: + description: "Prompt to pass to Claude" + required: true + + claude-oauth-token: + description: "Claude Code OAuth token for authentication" + required: true + + github-token: + description: "GitHub token for Claude to operate with" + required: true + + allowed-tools: + description: "Comma-separated list of allowed tools (e.g. 
Edit,Write,Bash(npm test))" + required: false + default: "" + + extra-allowed-tools: + description: "Additional allowed tools appended to allowed-tools" + required: false + default: "" + + model: + description: "Model to use for Claude" + required: false + default: "claude-opus-4-6" + + allowed-bots: + description: "Allowed bot usernames, or '*' for all bots" + required: false + default: "" + + track-progress: + description: "Whether Claude should track progress" + required: false + default: "true" + + mcp-servers: + description: "MCP server configuration JSON" + required: false + default: '{"mcpServers":{"agents-md-generator":{"type":"http","url":"https://agents-md-generator.fastmcp.app/mcp"},"public-code-search":{"type":"http","url":"https://public-code-search.fastmcp.app/mcp"}}}' + + trigger-phrase: + description: "Trigger phrase (for mention workflows)" + required: false + default: "/marvin" + +outputs: + conclusion: + description: "The conclusion of the Claude Code run" + value: ${{ steps.claude.outputs.conclusion }} + +runs: + using: "composite" + steps: + - name: Clean up stale Claude locks + shell: bash + run: rm -rf ~/.claude/.locks ~/.local/state/claude/locks || true + + - name: Run Claude Code + id: claude + env: + GITHUB_TOKEN: ${{ inputs.github-token }} + uses: anthropics/claude-code-action@v1 + with: + github_token: ${{ inputs.github-token }} + claude_code_oauth_token: ${{ inputs.claude-oauth-token }} + bot_name: "Marvin Context Protocol" + trigger_phrase: ${{ inputs.trigger-phrase }} + allowed_bots: ${{ inputs.allowed-bots }} + track_progress: ${{ inputs.track-progress }} + prompt: ${{ inputs.prompt }} + claude_args: | + ${{ (inputs.allowed-tools != '' || inputs.extra-allowed-tools != '') && format('--allowedTools {0}{1}', inputs.allowed-tools, inputs.extra-allowed-tools != '' && format(',{0}', inputs.extra-allowed-tools) || '') || '' }} + ${{ inputs.mcp-servers != '' && format('--mcp-config ''{0}''', inputs.mcp-servers) || '' }} + --model ${{ inputs.model }} + settings: | + {"model": "${{ inputs.model }}"} diff --git a/.github/scripts/mention/gh-get-review-threads.sh
b/.github/scripts/mention/gh-get-review-threads.sh new file mode 100755 index 0000000000..2e1f4b35df --- /dev/null +++ b/.github/scripts/mention/gh-get-review-threads.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Get PR review threads with comments via GitHub GraphQL API +# +# Usage: +# gh-get-review-threads.sh [FILTER] +# +# Arguments: +# FILTER - Optional: filter for unresolved threads from specific author +# +# Environment (set by composite action): +# MENTION_REPO - Repository (owner/repo format) +# MENTION_PR_NUMBER - Pull request number +# GITHUB_TOKEN - GitHub API token +# +# Output: +# JSON array of review threads with nested comments + +# Parse OWNER and REPO from MENTION_REPO +REPO_FULL="${MENTION_REPO:?MENTION_REPO environment variable is required}" +OWNER="${REPO_FULL%/*}" +REPO="${REPO_FULL#*/}" +PR_NUMBER="${MENTION_PR_NUMBER:?MENTION_PR_NUMBER environment variable is required}" +FILTER="${1:-}" + +gh api graphql -f query=' + query($owner: String!, $repo: String!, $prNumber: Int!) 
{ + repository(owner: $owner, name: $repo) { + pullRequest(number: $prNumber) { + reviewThreads(first: 100) { + nodes { + id + isResolved + isOutdated + path + line + comments(first: 50) { + nodes { + id + body + author { login } + createdAt + } + } + } + } + } + } + }' -F owner="$OWNER" \ + -F repo="$REPO" \ + -F prNumber="$PR_NUMBER" \ + --jq '.data.repository.pullRequest.reviewThreads.nodes' | \ +if [ -n "$FILTER" ]; then + jq --arg author "$FILTER" ' + map(select( + .isResolved == false and + (.comments.nodes | any(.author.login == $author)) + ))' +else + cat +fi diff --git a/.github/scripts/mention/gh-resolve-review-thread.sh b/.github/scripts/mention/gh-resolve-review-thread.sh new file mode 100755 index 0000000000..5dc08c2391 --- /dev/null +++ b/.github/scripts/mention/gh-resolve-review-thread.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Resolve a GitHub PR review thread, optionally posting a comment first +# +# Usage: +# gh-resolve-review-thread.sh THREAD_ID [COMMENT] +# +# Arguments: +# THREAD_ID - The GraphQL node ID of the review thread to resolve +# COMMENT - Optional: Comment body to post before resolving +# +# Environment (set by composite action): +# MENTION_REPO - Repository (owner/repo format) +# MENTION_PR_NUMBER - Pull request number +# GITHUB_TOKEN - GitHub API token +# +# Behavior: +# 1. If COMMENT is provided, posts it as a reply to the thread +# 2. Resolves the thread + +# Validate required environment variables +: "${MENTION_REPO:?MENTION_REPO environment variable is required}" +: "${MENTION_PR_NUMBER:?MENTION_PR_NUMBER environment variable is required}" +THREAD_ID="${1:?Thread ID required}" +COMMENT="${2:-}" + +# Step 1: Post comment if provided +if [ -n "$COMMENT" ]; then + echo "Posting comment to thread..." >&2 + COMMENT_RESULT=$(gh api graphql -f query=' + mutation($threadId: ID!, $body: String!)
{ + addPullRequestReviewThreadReply(input: { + pullRequestReviewThreadId: $threadId, + body: $body + }) { + comment { + id + } + } + }' -f threadId="$THREAD_ID" -f body="$COMMENT") + if echo "$COMMENT_RESULT" | jq -e '.errors' > /dev/null 2>&1; then + echo "Error posting comment: $COMMENT_RESULT" >&2 + exit 1 + fi +fi + +# Step 2: Resolve the thread +echo "Resolving thread..." >&2 +RESOLVE_RESULT=$(gh api graphql -f query=' + mutation($threadId: ID!) { + resolveReviewThread(input: {threadId: $threadId}) { + thread { + id + isResolved + } + } + }' -f threadId="$THREAD_ID" --jq '.data.resolveReviewThread.thread') + +echo "$RESOLVE_RESULT" +echo "βœ“ Thread resolved" >&2 diff --git a/.github/scripts/pr-review/pr-comment.sh b/.github/scripts/pr-review/pr-comment.sh new file mode 100755 index 0000000000..d571f6757e --- /dev/null +++ b/.github/scripts/pr-review/pr-comment.sh @@ -0,0 +1,251 @@ +#!/bin/bash +# pr-comment.sh - Queue a structured inline review comment for the PR review +# +# Usage: +# pr-comment.sh --severity --title --why [suggestion via stdin] +# pr-comment.sh --severity --title --why --no-suggestion +# +# Arguments: +# file File path (required) +# line Line number (required) +# --severity Severity level: critical, high, medium, low, nitpick (required) +# --title Brief description for comment heading (required) +# --why One sentence explaining the risk/impact (required) +# --no-suggestion Explicitly skip suggestion (use for architectural issues) +# +# The suggestion code is read from stdin (use heredoc). If no stdin and no --no-suggestion, errors. 
+# +# Examples: +# # With suggestion (preferred) +# pr-comment.sh src/main.go 42 --severity high --title "Missing error check" --why "Errors are silently ignored" <<'EOF' +# if err != nil { +# return fmt.Errorf("operation failed: %w", err) +# } +# EOF +# +# # Without suggestion (for issues requiring broader changes) +# pr-comment.sh src/main.go 42 --severity medium --title "Consider extracting to function" \ +# --why "This logic is duplicated in 3 places" --no-suggestion +# +# Environment variables (set by the composite action): +# PR_REVIEW_REPO - Repository (owner/repo) +# PR_REVIEW_PR_NUMBER - Pull request number +# PR_REVIEW_COMMENTS_DIR - Directory to cache comments (default: /tmp/pr-review-comments) + +set -e + +# Configuration from environment +REPO="${PR_REVIEW_REPO:?PR_REVIEW_REPO environment variable is required}" +PR_NUMBER="${PR_REVIEW_PR_NUMBER:?PR_REVIEW_PR_NUMBER environment variable is required}" +COMMENTS_DIR="${PR_REVIEW_COMMENTS_DIR:-/tmp/pr-review-comments}" + +# Severity emoji mapping +declare -A SEVERITY_EMOJI=( + [critical]="πŸ”΄ CRITICAL" + [high]="🟠 HIGH" + [medium]="🟑 MEDIUM" + [low]="βšͺ LOW" + [nitpick]="πŸ’¬ NITPICK" +) + +# Parse arguments +FILE="" +LINE="" +SEVERITY="" +TITLE="" +WHY="" +NO_SUGGESTION=false + +# First two positional args are file and line +if [ $# -lt 2 ]; then + echo "Error: file and line are required" + echo "Usage: pr-comment.sh --severity --title --why [<<'EOF' ... EOF]" + exit 1 +fi + +FILE="$1" +LINE="$2" +shift 2 + +# Parse named arguments +while [ $# -gt 0 ]; do + case "$1" in + --severity) + SEVERITY="$2" + shift 2 + ;; + --title) + TITLE="$2" + shift 2 + ;; + --why) + WHY="$2" + shift 2 + ;; + --no-suggestion) + NO_SUGGESTION=true + shift + ;; + *) + echo "Error: Unknown argument: $1" + exit 1 + ;; + esac +done + +# Read suggestion from stdin if available +SUGGESTION="" +if [ ! 
-t 0 ]; then + SUGGESTION=$(cat) +fi + +# Validate required arguments +if [ -z "$SEVERITY" ]; then + echo "Error: --severity is required (critical, high, medium, low, nitpick)" + exit 1 +fi + +if [ -z "$TITLE" ]; then + echo "Error: --title is required" + exit 1 +fi + +if [ -z "$WHY" ]; then + echo "Error: --why is required" + exit 1 +fi + +# Validate severity level +if [ -z "${SEVERITY_EMOJI[$SEVERITY]}" ]; then + echo "Error: Invalid severity '$SEVERITY'. Must be one of: critical, high, medium, low, nitpick" + exit 1 +fi + +# Require either suggestion or explicit --no-suggestion +if [ -z "$SUGGESTION" ] && [ "$NO_SUGGESTION" = false ]; then + echo "Error: Suggestion required. Provide code via stdin (heredoc) or use --no-suggestion" + echo "" + echo "Example with suggestion:" + echo " pr-comment.sh file.go 42 --severity high --title \"desc\" --why \"reason\" <<'EOF'" + echo " fixed code here" + echo " EOF" + echo "" + echo "Example without suggestion:" + echo " pr-comment.sh file.go 42 --severity medium --title \"desc\" --why \"reason\" --no-suggestion" + exit 1 +fi + +# Validate line is a positive integer (>= 1) +if ! 
[[ "$LINE" =~ ^[1-9][0-9]*$ ]]; then + echo "Error: Line number must be a positive integer (>= 1), got: $LINE" + exit 1 +fi + +# Get the diff for this file to validate the comment location +DIFF_DATA=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}/files" --paginate | jq --arg f "$FILE" '.[] | select(.filename==$f)') + +if [ -z "$DIFF_DATA" ]; then + echo "Error: File '${FILE}' not found in PR diff" + echo "" + echo "Files changed in this PR:" + gh api "repos/${REPO}/pulls/${PR_NUMBER}/files" --paginate --jq '.[].filename' + exit 1 +fi + +PATCH=$(echo "$DIFF_DATA" | jq -r '.patch // empty') + +if [ -z "$PATCH" ]; then + echo "Error: No patch data for file '${FILE}' (file may be binary or too large)" + exit 1 +fi + +# Verify the line exists in the diff +LINE_IN_DIFF=$(echo "$PATCH" | awk -v target_line="$LINE" ' +BEGIN { current_line = 0; found = 0 } +/^@@/ { + line = $0 + gsub(/.*\+/, "", line) + gsub(/[^0-9].*/, "", line) + current_line = line - 1 + next +} +{ + if (substr($0, 1, 1) != "-") { + current_line++ + if (current_line == target_line) { + found = 1 + exit + } + } +} +END { if (found) print "1"; else print "0" } +') + +if [ "$LINE_IN_DIFF" != "1" ]; then + echo "Error: Line ${LINE} not found in the diff for '${FILE}'" + echo "" + echo "Note: You can only comment on lines that appear in the diff (added, modified, or context lines)" + echo "" + echo "First 50 lines of diff for this file:" + echo "$PATCH" | head -50 + exit 1 +fi + +# Create comments directory if it doesn't exist +mkdir -p "${COMMENTS_DIR}" + +# Assemble the comment body +SEVERITY_LABEL="${SEVERITY_EMOJI[$SEVERITY]}" + +BODY="**${SEVERITY_LABEL}** ${TITLE} + +Why: ${WHY}" + +# Add suggestion block if provided +if [ -n "$SUGGESTION" ]; then + BODY="${BODY} + +\`\`\`suggestion +${SUGGESTION} +\`\`\`" +fi + +# Append standard footer +FOOTER=' + +--- +Marvin Context Protocol | Type `/marvin` to interact further + +Give us feedback! React with πŸš€ if perfect, πŸ‘ if helpful, πŸ‘Ž if not.' 
+ +BODY_WITH_FOOTER="${BODY}${FOOTER}" + +# Generate unique comment ID +COMMENT_ID="comment-$(date +%s)-$(od -An -N4 -tu4 /dev/urandom | tr -d ' ')" +COMMENT_FILE="${COMMENTS_DIR}/${COMMENT_ID}.json" + +# Create the comment JSON object +jq -n \ + --arg path "$FILE" \ + --argjson line "$LINE" \ + --arg side "RIGHT" \ + --arg body "$BODY_WITH_FOOTER" \ + --arg id "$COMMENT_ID" \ + '{ + path: $path, + line: $line, + side: $side, + body: $body, + _meta: { + id: $id, + file: $path, + line: $line + } + }' > "${COMMENT_FILE}" + +echo "βœ“ Queued review comment for ${FILE}:${LINE}" +echo " Severity: ${SEVERITY_LABEL}" +echo " Title: ${TITLE}" +echo " Comment ID: ${COMMENT_ID}" +echo " Comment will be submitted with pr-review.sh" +echo " Remove with: pr-remove-comment.sh ${FILE} ${LINE}" diff --git a/.github/scripts/pr-review/pr-diff.sh b/.github/scripts/pr-review/pr-diff.sh new file mode 100755 index 0000000000..4448e0012b --- /dev/null +++ b/.github/scripts/pr-review/pr-diff.sh @@ -0,0 +1,128 @@ +#!/bin/bash +# pr-diff.sh - Show changed files or diff for a specific file +# +# Usage: +# pr-diff.sh - List all changed files (shows full diff if small enough) +# pr-diff.sh - Show diff for a specific file with line numbers +# +# Environment variables (set by the composite action): +# PR_REVIEW_REPO - Repository (owner/repo) +# PR_REVIEW_PR_NUMBER - Pull request number + +set -e + +# Configuration from environment +REPO="${PR_REVIEW_REPO:?PR_REVIEW_REPO environment variable is required}" +PR_NUMBER="${PR_REVIEW_PR_NUMBER:?PR_REVIEW_PR_NUMBER environment variable is required}" +EXPECTED_HEAD="${PR_REVIEW_HEAD_SHA:-}" + +# Check if HEAD has changed since review started (race condition detection) +if [ -n "$EXPECTED_HEAD" ]; then + CURRENT_HEAD=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}" --jq '.head.sha') + if [ "$CURRENT_HEAD" != "$EXPECTED_HEAD" ]; then + echo "⚠️ WARNING: PR head has changed since review started!" 
+ echo " Review started at: ${EXPECTED_HEAD:0:7}" + echo " Current head: ${CURRENT_HEAD:0:7}" + echo " Line numbers below may not match the commit being reviewed." + echo "" + fi +fi + +# Thresholds for "too big" - show file list only if exceeded +MAX_FILES=25 +MAX_TOTAL_LINES=1500 + +FILE="$1" + +# Function to add line numbers to a patch +# Format: [LINE] +added | [LINE] context | [----] -deleted +add_line_numbers() { + awk ' + BEGIN { new_line = 0 } + /^@@/ { + # Parse hunk header: @@ -old_start,old_count +new_start,new_count @@ + match($0, /\+([0-9]+)/) + new_line = substr($0, RSTART+1, RLENGTH-1) - 1 + print "" + print $0 + next + } + /^-/ { + # Deleted line - cannot comment on these + printf "[----] %s\n", $0 + next + } + /^\+/ { + # Added line - can comment, show line number + new_line++ + printf "[%4d] %s\n", new_line, $0 + next + } + { + # Context line (space prefix) - can comment, show line number + new_line++ + printf "[%4d] %s\n", new_line, $0 + } + ' +} + +if [ -z "$FILE" ]; then + # Get file list with stats + FILES_DATA=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}/files" --paginate) + + FILE_COUNT=$(echo "$FILES_DATA" | jq 'length') + TOTAL_ADDITIONS=$(echo "$FILES_DATA" | jq '[.[].additions] | add // 0') + TOTAL_DELETIONS=$(echo "$FILES_DATA" | jq '[.[].deletions] | add // 0') + TOTAL_LINES=$((TOTAL_ADDITIONS + TOTAL_DELETIONS)) + + echo "PR #${PR_NUMBER} Summary: ${FILE_COUNT} files changed (+${TOTAL_ADDITIONS}/-${TOTAL_DELETIONS})" + echo "" + + # Check if diff is too large + if [ "$FILE_COUNT" -gt "$MAX_FILES" ] || [ "$TOTAL_LINES" -gt "$MAX_TOTAL_LINES" ]; then + echo "⚠️ Large diff detected (>${MAX_FILES} files or >${MAX_TOTAL_LINES} lines changed)" + echo " Review files individually using: pr-diff.sh " + echo "" + echo "Files changed:" + echo "$FILES_DATA" | jq -r '.[] | " \(.filename) (+\(.additions)/-\(.deletions))"' + else + # Small enough - show all diffs with line numbers + echo "Files changed:" + echo "$FILES_DATA" | jq -r '.[] | " 
\(.filename) (+\(.additions)/-\(.deletions))"' + echo "" + echo "─────────────────────────────────────────────────────────────────────" + echo "" + + # Show each file's diff by iterating over indices + for i in $(seq 0 $((FILE_COUNT - 1))); do + FNAME=$(echo "$FILES_DATA" | jq -r ".[$i].filename") + PATCH=$(echo "$FILES_DATA" | jq -r ".[$i].patch // empty") + + if [ -n "$PATCH" ]; then + echo "## ${FNAME}" + echo "Use: pr-comment.sh ${FNAME} --severity --title \"desc\" --why \"reason\" <<'EOF' ... EOF" + echo "Format: [LINE] +added | [LINE] context | [----] -deleted (can't comment)" + echo "$PATCH" | add_line_numbers + echo "" + echo "─────────────────────────────────────────────────────────────────────" + echo "" + fi + done + fi +else + # Show specific file diff + PATCH=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}/files" --paginate | jq -r --arg file "$FILE" '.[] | select(.filename==$file) | .patch // empty') + + if [ -z "$PATCH" ]; then + echo "Error: File '${FILE}' not found in PR diff" + echo "" + echo "Files changed in this PR:" + gh api "repos/${REPO}/pulls/${PR_NUMBER}/files" --paginate --jq '.[].filename' + exit 1 + fi + + echo "## ${FILE}" + echo "Use: pr-comment.sh ${FILE} --severity --title \"desc\" --why \"reason\" <<'EOF' ...
EOF" + echo "Format: [LINE] +added | [LINE] context | [----] -deleted (can't comment)" + echo "$PATCH" | add_line_numbers +fi diff --git a/.github/scripts/pr-review/pr-existing-comments.sh b/.github/scripts/pr-review/pr-existing-comments.sh new file mode 100755 index 0000000000..10fa05f168 --- /dev/null +++ b/.github/scripts/pr-review/pr-existing-comments.sh @@ -0,0 +1,190 @@ +#!/bin/bash +# pr-existing-comments.sh - Fetch existing review threads on a PR +# +# Usage: +# pr-existing-comments.sh - Show all review threads with full details +# pr-existing-comments.sh --summary - Show per-file summary only (for large PRs) +# pr-existing-comments.sh --unresolved - Show only unresolved threads +# pr-existing-comments.sh --file - Show threads for a specific file +# pr-existing-comments.sh --full - Show full comment text (no truncation) +# +# Output: Formatted summary of existing review threads grouped by file, +# showing thread status, comments, and whether issues were addressed. +# +# For large PRs, use --summary first to see the overview, then --file +# to get full thread details when reviewing each file. 
+# +# Environment variables (set by the composite action): +# PR_REVIEW_REPO - Repository (owner/repo) +# PR_REVIEW_PR_NUMBER - Pull request number + +set -e + +# Configuration from environment +REPO="${PR_REVIEW_REPO:?PR_REVIEW_REPO environment variable is required}" +PR_NUMBER="${PR_REVIEW_PR_NUMBER:?PR_REVIEW_PR_NUMBER environment variable is required}" + +OWNER="${REPO%/*}" +REPO_NAME="${REPO#*/}" + +# Parse arguments +FILTER_UNRESOLVED=false +FILTER_FILE="" +SUMMARY_ONLY=false +FULL_TEXT=false + +while [ $# -gt 0 ]; do + case "$1" in + --unresolved) + FILTER_UNRESOLVED=true + shift + ;; + --file) + FILTER_FILE="$2" + shift 2 + ;; + --summary) + SUMMARY_ONLY=true + shift + ;; + --full) + FULL_TEXT=true + shift + ;; + *) + echo "Usage: pr-existing-comments.sh [--summary] [--unresolved] [--file ] [--full]" + exit 1 + ;; + esac +done + +# Fetch review threads via GraphQL +THREADS=$(gh api graphql -f query=' + query($owner: String!, $repo: String!, $prNumber: Int!) { + repository(owner: $owner, name: $repo) { + pullRequest(number: $prNumber) { + reviewThreads(first: 100) { + nodes { + id + isResolved + isOutdated + path + line + originalLine + startLine + originalStartLine + diffSide + comments(first: 50) { + nodes { + id + body + author { login } + createdAt + originalCommit { abbreviatedOid } + } + } + } + } + } + } + }' -F owner="$OWNER" \ + -F repo="$REPO_NAME" \ + -F prNumber="$PR_NUMBER" \ + --jq '.data.repository.pullRequest.reviewThreads.nodes') + +if [ -z "$THREADS" ] || [ "$THREADS" = "null" ]; then + echo "No existing review threads found." 
+ exit 0 +fi + +# Apply filters +FILTERED="$THREADS" + +if [ "$FILTER_UNRESOLVED" = true ]; then + FILTERED=$(echo "$FILTERED" | jq '[.[] | select(.isResolved == false)]') +fi + +if [ -n "$FILTER_FILE" ]; then + FILTERED=$(echo "$FILTERED" | jq --arg file "$FILTER_FILE" '[.[] | select(.path == $file)]') +fi + +THREAD_COUNT=$(echo "$FILTERED" | jq 'length') + +if [ "$THREAD_COUNT" -eq 0 ]; then + if [ "$FILTER_UNRESOLVED" = true ]; then + echo "No unresolved review threads found." + elif [ -n "$FILTER_FILE" ]; then + echo "No review threads found for ${FILTER_FILE}." + else + echo "No existing review threads found." + fi + exit 0 +fi + +# Count resolved vs unresolved +RESOLVED_COUNT=$(echo "$FILTERED" | jq '[.[] | select(.isResolved == true)] | length') +UNRESOLVED_COUNT=$(echo "$FILTERED" | jq '[.[] | select(.isResolved == false)] | length') +OUTDATED_COUNT=$(echo "$FILTERED" | jq '[.[] | select(.isOutdated == true)] | length') + +echo "Existing review threads: ${THREAD_COUNT} total (${UNRESOLVED_COUNT} unresolved, ${RESOLVED_COUNT} resolved, ${OUTDATED_COUNT} outdated)" +echo "" + +# Summary mode: show per-file counts only +if [ "$SUMMARY_ONLY" = true ]; then + echo "Threads by file:" + echo "$FILTERED" | jq -r ' + group_by(.path) | .[] | + . 
as $threads | + ($threads | length) as $total | + ([$threads[] | select(.isResolved == false)] | length) as $unresolved | + ([$threads[] | select(.isResolved == true)] | length) as $resolved | + ([$threads[] | select(.isOutdated == true)] | length) as $outdated | + ([$threads[] | select(.comments.nodes | length > 1)] | length) as $has_replies | + " " + $threads[0].path + + " β€” " + ($total | tostring) + " threads" + + " (" + ($unresolved | tostring) + " unresolved, " + ($resolved | tostring) + " resolved" + + (if $outdated > 0 then ", " + ($outdated | tostring) + " outdated" else "" end) + + ")" + + (if $has_replies > 0 then " ⚠️ " + ($has_replies | tostring) + " with replies" else "" end) + ' + echo "" + echo "Use: pr-existing-comments.sh --file to see full thread details for a file" + exit 0 +fi + +# Full detail mode: output threads grouped by file +# Show full conversation for threads with replies +FIRST_LIMIT=200 +REPLY_LIMIT=300 +if [ "$FULL_TEXT" = true ]; then + FIRST_LIMIT=999999 + REPLY_LIMIT=999999 +fi + +echo "$FILTERED" | jq -r --argjson first_limit "$FIRST_LIMIT" --argjson reply_limit "$REPLY_LIMIT" ' + group_by(.path) | .[] | + "## " + .[0].path + " (" + (length | tostring) + " threads)\n" + + ([.[] | + " " + + (if .isResolved then "βœ… RESOLVED" elif .isOutdated then "⚠️ OUTDATED" else "πŸ”΄ UNRESOLVED" end) + + " (line " + (if .line then (.line | tostring) elif .startLine then (.startLine | tostring) elif .originalLine then ("~" + (.originalLine | tostring)) elif .originalStartLine then ("~" + (.originalStartLine | tostring)) else "?" 
end) + ")" + + # Show the commit the comment was originally made on + (if .comments.nodes[0].originalCommit.abbreviatedOid then " [" + .comments.nodes[0].originalCommit.abbreviatedOid + "]" else "" end) + + # Flag threads with replies β€” indicates a conversation happened + (if (.comments.nodes | length) > 1 then " ← has replies" else "" end) + + "\n" + + ([.comments.nodes | to_entries[] | + .value as $comment | + .key as $idx | + ($comment.body | gsub("\n"; " ")) as $flat | + if $idx == 0 then + " @" + ($comment.author.login // "unknown") + ": " + $flat[0:$first_limit] + + (if ($flat | length) > $first_limit then " [truncated]" else "" end) + else + " ↳ @" + ($comment.author.login // "unknown") + ": " + $flat[0:$reply_limit] + + (if ($flat | length) > $reply_limit then " [truncated]" else "" end) + end + ] | join("\n")) + + "\n" + ] | join("\n")) +' diff --git a/.github/scripts/pr-review/pr-remove-comment.sh b/.github/scripts/pr-review/pr-remove-comment.sh new file mode 100755 index 0000000000..04b73fbf65 --- /dev/null +++ b/.github/scripts/pr-review/pr-remove-comment.sh @@ -0,0 +1,84 @@ +#!/bin/bash +# pr-remove-comment.sh - Remove a queued review comment +# +# Usage: +# pr-remove-comment.sh +# pr-remove-comment.sh +# +# Examples: +# pr-remove-comment.sh src/main.go 42 +# pr-remove-comment.sh comment-1234567890-1234567890 +# +# This script removes a previously queued comment before it's submitted. +# Useful if the agent realizes it made a mistake or wants to update a comment. +# +# Environment variables (set by the composite action): +# PR_REVIEW_COMMENTS_DIR - Directory containing comment files (default: /tmp/pr-review-comments) + +set -e + +COMMENTS_DIR="${PR_REVIEW_COMMENTS_DIR:-/tmp/pr-review-comments}" + +if [ ! 
-d "${COMMENTS_DIR}" ]; then + echo "No comments directory found: ${COMMENTS_DIR}" + exit 0 +fi + +# Check if first argument looks like a comment ID +if [[ "$1" =~ ^comment- ]]; then + COMMENT_ID="$1" + COMMENT_FILE="${COMMENTS_DIR}/${COMMENT_ID}.json" + + if [ -f "${COMMENT_FILE}" ]; then + FILE=$(jq -r '._meta.file // .path' "${COMMENT_FILE}") + LINE=$(jq -r '._meta.line // .line' "${COMMENT_FILE}") + rm -f "${COMMENT_FILE}" + echo "βœ“ Removed comment ${COMMENT_ID} for ${FILE}:${LINE}" + else + echo "Comment not found: ${COMMENT_ID}" + exit 1 + fi +else + # Treat as file and line number + FILE="$1" + LINE="$2" + + if [ -z "$FILE" ] || [ -z "$LINE" ]; then + echo "Usage:" + echo " pr-remove-comment.sh " + echo " pr-remove-comment.sh " + echo "" + echo "Examples:" + echo " pr-remove-comment.sh src/main.go 42" + echo " pr-remove-comment.sh comment-1234567890-1234567890" + exit 1 + fi + + # Validate line is a positive integer (>= 1) + if ! [[ "$LINE" =~ ^[1-9][0-9]*$ ]]; then + echo "Error: Line number must be a positive integer (>= 1), got: $LINE" + exit 1 + fi + + # Find and remove matching comment files + # Use nullglob to handle case where no files match + shopt -s nullglob + REMOVED=0 + for COMMENT_FILE in "${COMMENTS_DIR}"/comment-*.json; do + + COMMENT_FILE_PATH=$(jq -r '._meta.file // .path' "${COMMENT_FILE}") + COMMENT_LINE=$(jq -r '._meta.line // .line' "${COMMENT_FILE}") + + if [ "$COMMENT_FILE_PATH" = "$FILE" ] && [ "$COMMENT_LINE" = "$LINE" ]; then + COMMENT_ID=$(basename "${COMMENT_FILE}" .json) + rm -f "${COMMENT_FILE}" + echo "βœ“ Removed comment ${COMMENT_ID} for ${FILE}:${LINE}" + REMOVED=$((REMOVED + 1)) + fi + done + + if [ "$REMOVED" -eq 0 ]; then + echo "No comment found for ${FILE}:${LINE}" + exit 1 + fi +fi diff --git a/.github/scripts/pr-review/pr-review.sh b/.github/scripts/pr-review/pr-review.sh new file mode 100755 index 0000000000..48c0b6888a --- /dev/null +++ b/.github/scripts/pr-review/pr-review.sh @@ -0,0 +1,143 @@ +#!/bin/bash +# 
pr-review.sh - Submit a PR review (approve, request changes, or comment) +# +# Usage: pr-review.sh [review-body] +# Example: pr-review.sh REQUEST_CHANGES "Please fix the issues noted above" +# +# This script creates and submits a review with any queued inline comments. +# Comments are read from individual files in PR_REVIEW_COMMENTS_DIR (created by pr-comment.sh). +# +# The review body can contain special characters (backticks, dollar signs, etc.) +# and will be safely passed to the GitHub API without shell interpretation. +# +# Environment variables (set by the composite action): +# PR_REVIEW_REPO - Repository (owner/repo) +# PR_REVIEW_PR_NUMBER - Pull request number +# PR_REVIEW_HEAD_SHA - HEAD commit SHA +# PR_REVIEW_COMMENTS_DIR - Directory containing queued comment files (default: /tmp/pr-review-comments) + +set -e + +# Configuration from environment +REPO="${PR_REVIEW_REPO:?PR_REVIEW_REPO environment variable is required}" +PR_NUMBER="${PR_REVIEW_PR_NUMBER:?PR_REVIEW_PR_NUMBER environment variable is required}" +HEAD_SHA="${PR_REVIEW_HEAD_SHA:?PR_REVIEW_HEAD_SHA environment variable is required}" +COMMENTS_DIR="${PR_REVIEW_COMMENTS_DIR:-/tmp/pr-review-comments}" + +# Arguments +EVENT="$1" +shift 2>/dev/null || true + +# Read body from remaining arguments +# Join all remaining arguments with spaces, preserving the string as-is +BODY="$*" + +if [ -z "$EVENT" ]; then + echo "Usage: pr-review.sh [review-body]" + echo "Example: pr-review.sh REQUEST_CHANGES 'Please fix the issues noted in the inline comments'" + exit 1 +fi + +# Validate event type +case "$EVENT" in + APPROVE|REQUEST_CHANGES|COMMENT) + ;; + *) + echo "Error: Invalid event type '${EVENT}'" + echo "Must be one of: APPROVE, REQUEST_CHANGES, COMMENT" + exit 1 + ;; +esac + +# Read queued comments from individual files +COMMENTS="[]" +COMMENT_COUNT=0 + +if [ -d "${COMMENTS_DIR}" ]; then + # Collect all comment files and merge into a single JSON array + # Remove _meta fields before submitting (they're only 
for internal use) + COMMENT_FILES=("${COMMENTS_DIR}"/comment-*.json) + + if [ -f "${COMMENT_FILES[0]}" ]; then + # Use jq to read all comment files, extract the comment data (without _meta), and combine + COMMENTS=$(jq -s '[.[] | del(._meta)]' "${COMMENTS_DIR}"/comment-*.json) + COMMENT_COUNT=$(echo "$COMMENTS" | jq 'length') + if [ "$COMMENT_COUNT" -gt 0 ]; then + echo "Found ${COMMENT_COUNT} queued inline comment(s)" + fi + fi +fi + +# Append standard footer to the review body (if body is provided) +FOOTER=' + +--- +Marvin Context Protocol | Type `/marvin` to interact further + +Give us feedback! React with πŸš€ if perfect, πŸ‘ if helpful, πŸ‘Ž if not.' + +if [ -n "$BODY" ]; then + BODY_WITH_FOOTER="${BODY}${FOOTER}" +else + BODY_WITH_FOOTER="" +fi + +# Build the review request JSON +# Use jq to safely construct the JSON with all special characters handled +REVIEW_JSON=$(jq -n \ + --arg commit_id "$HEAD_SHA" \ + --arg event "$EVENT" \ + --arg body "$BODY_WITH_FOOTER" \ + --argjson comments "$COMMENTS" \ + '{ + commit_id: $commit_id, + event: $event, + comments: $comments + } + (if $body != "" then {body: $body} else {} end)') + +# Check if HEAD has changed since review started (race condition detection) +CURRENT_HEAD=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}" --jq '.head.sha') +if [ "$CURRENT_HEAD" != "$HEAD_SHA" ]; then + echo "⚠️ WARNING: PR head has changed since review started!" + echo " Review started at: ${HEAD_SHA:0:7}" + echo " Current head: ${CURRENT_HEAD:0:7}" + echo "" + echo " New commits may have shifted line numbers. Review will be submitted" + echo " against the original commit (${HEAD_SHA:0:7}) but comments may be outdated." + echo "" +fi + +echo "Submitting ${EVENT} review for commit ${HEAD_SHA:0:7}..." 
+ +# Create and submit the review in one API call +# Use a temp file to safely pass the JSON body +TEMP_JSON=$(mktemp) +trap "rm -f ${TEMP_JSON}" EXIT +echo "$REVIEW_JSON" > "${TEMP_JSON}" + +RESPONSE=$(gh api "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \ + -X POST \ + --input "${TEMP_JSON}" 2>&1) || { + echo "Error submitting review:" + echo "$RESPONSE" + exit 1 +} + +# Clean up the comments directory after successful submission +if [ -d "${COMMENTS_DIR}" ] && [ "$COMMENT_COUNT" -gt 0 ]; then + rm -f "${COMMENTS_DIR}"/comment-*.json + # Remove directory if empty + rmdir "${COMMENTS_DIR}" 2>/dev/null || true +fi + +REVIEW_URL=$(echo "$RESPONSE" | jq -r '.html_url // empty') +REVIEW_STATE=$(echo "$RESPONSE" | jq -r '.state // empty') + +if [ -n "$REVIEW_URL" ]; then + echo "βœ“ Review submitted (${REVIEW_STATE}): ${REVIEW_URL}" + if [ "$COMMENT_COUNT" -gt 0 ]; then + echo " Included ${COMMENT_COUNT} inline comment(s)" + fi +else + echo "βœ“ Review submitted successfully" +fi diff --git a/.github/workflows/martian-issue-triage.yml b/.github/workflows/martian-issue-triage.yml deleted file mode 100644 index 276c7142e6..0000000000 --- a/.github/workflows/martian-issue-triage.yml +++ /dev/null @@ -1,178 +0,0 @@ -name: Martian Issue Triage - -on: - issues: - types: [opened, labeled] - -jobs: - martian-issue-triage: - # For labeled events, verify the labeler is a repo member to prevent privilege escalation - if: | - (github.event.action == 'opened' && contains(fromJSON('["strawgate", "jlowin"]'), github.actor)) || - (github.event.action == 'labeled' && github.event.label.name == 'triage-martian' && contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.sender.author_association)) - - concurrency: - group: triage-martian-${{ github.event.issue.number }} - cancel-in-progress: true - - runs-on: ubuntu-latest - timeout-minutes: 10 - permissions: - contents: read - issues: write - pull-requests: read - id-token: write - - steps: - - name: Checkout base repository 
- uses: actions/checkout@v6 - with: - repository: ${{ github.repository }} - ref: ${{ github.event.repository.default_branch }} - - # Install UV package manager - - name: Install UV - uses: astral-sh/setup-uv@v7 - with: - enable-cache: true - cache-dependency-glob: "uv.lock" - - - name: Generate Marvin App token - id: marvin-token - uses: actions/create-github-app-token@v2 - with: - app-id: ${{ secrets.MARVIN_APP_ID }} - private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} - - - name: Set triage prompt - id: triage-prompt - run: | - cat >> $GITHUB_OUTPUT << 'EOF' - PROMPT< and tags. You shouldn't put everything in collapsible sections, especially if the response is short. Use your discretion to determine when to use collapsible sections to avoid overwhelming the reader with too much detail -- think of them like an appendix that can be expanded if the reader is interested. - - # Example output for "Recommendation" part of the response - PR #654 already implements the requested feature but is incomplete. The Pull Request is not in a mergeable state yet, the remaining work should be completed: 1) update the Calculator.divide method to utilize the new DivisionByZeroError or the safe_divide function, and 2) update the tests to ensure that the Calculator.divide method raises the new DivisionByZeroError when the divisor is 0. - -
- Findings - ...details from the code analysis that are relevant to the issue and the recommendation... -
- -
- Detailed Action Plan - ...a detailed plan that a junior developer could follow to implement the recommendation... -
- - # Example Output for "Related Items" part of the response - -
- Related Issues and Pull Requests - - | Repository | Issue or PR | Relevance | - | --- | --- | --- | - | jlowin/fastmcp | [Add matrix operations support](https://github.com/jlowin/fastmcp/pull/680) | This pull request directly addresses the feature request for adding matrix operations to the calculator. | - | jlowin/fastmcp | [Add matrix operations support](https://github.com/jlowin/fastmcp/issues/681) | This issue directly addresses the feature request for adding matrix operations to the calculator. | -
- -
- Related Files - - | Repository | File | Relevance | Sections | - | --- | --- | --- | --- | - | modelcontextprotocol/python-sdk | [test_calculator.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/test_calculator.py) | This file contains the test cases for the Calculator class, including a test that specifically asserts a ValueError is raised for division by zero, confirming the current intended behavior. | [25-27](https://github.com/modelcontextprotocol/python-sdk/blob/main/test_calculator.py#L25-L27) | - | modelcontextprotocol/python-sdk | [calculator.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/calculator.py) | This file contains the implementation of the Calculator class, specifically the `divide` method which raises the ValueError when dividing by zero, matching the bug report. | [29-32](https://github.com/modelcontextprotocol/python-sdk/blob/main/calculator.py#L29-L32) | -
- -
- Related Webpages - | Name | URL | Relevance | - | --- | --- | --- | - | Handling Division by Zero Best Practices | https://my-blog-about-division-by-zero.com/handling+division+by+zero+in+calculator | This webpage provides general best practices for handling division by zero in calculator applications and in Python, which is directly relevant to the issue and potential solutions. | -
- - PROMPT_END - EOF - - - name: Setup GitHub MCP Server - run: | - mkdir -p /tmp/mcp-config - cat > /tmp/mcp-config/mcp-servers.json << 'EOF' - { - "mcpServers": { - "repository-summary": { - "type": "http", - "url": "https://agents-md-generator.fastmcp.app/mcp" - }, - "code-search": { - "type": "http", - "url": "https://public-code-search.fastmcp.app/mcp" - }, - "github-research": { - "type": "stdio", - "command": "uvx", - "args": [ - "github-research-mcp" - ], - "env": { - "DISABLE_SUMMARIES": "true", - "GITHUB_PERSONAL_ACCESS_TOKEN": "${{ steps.marvin-token.outputs.token }}" - } - } - } - } - EOF - - - name: Clean up stale Claude locks - run: rm -rf ~/.claude/.locks ~/.local/state/claude/locks || true - - - name: Run Martian for Issue Triage - uses: anthropics/claude-code-action@v1 - with: - github_token: ${{ steps.marvin-token.outputs.token }} - bot_name: "Marvin Context Protocol" - prompt: ${{ steps.triage-prompt.outputs.PROMPT }} - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY_FOR_CI }} - track_progress: true - claude_args: | - --model claude-sonnet-4-5-20250929 - --allowedTools mcp__repository-summary,mcp__code-search__search_code,mcp__github-research__get_repository,mcp__github-research__get_issue,mcp__github-research__get_pull_request,mcp__github-research__search_issues,mcp__github-research__search_pull_requests,mcp__github-research__get_files - --mcp-config /tmp/mcp-config/mcp-servers.json - settings: | - { - "GH_TOKEN": "${{ steps.marvin-token.outputs.token }}" - } diff --git a/.github/workflows/martian-triage-issue.yml b/.github/workflows/martian-triage-issue.yml new file mode 100644 index 0000000000..009fadfeb0 --- /dev/null +++ b/.github/workflows/martian-triage-issue.yml @@ -0,0 +1,204 @@ +# Triage new issues: investigate, recommend, apply labels +# Calls run-claude directly with triage prompt (elastic issue-triage style) + +name: Triage Issue + +on: + issues: + types: [opened] + +jobs: + triage: + if: | + contains(fromJSON('["jlowin", 
"strawgate"]'), github.event.issue.user.login) + concurrency: + group: triage-issue-${{ github.event.issue.number }} + cancel-in-progress: true + + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + contents: read + issues: write + pull-requests: read + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + repository: ${{ github.repository }} + ref: ${{ github.event.repository.default_branch }} + + - name: Generate Marvin App token + id: marvin-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.MARVIN_APP_ID }} + private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} + + - name: React to issue with eyes + env: + GH_TOKEN: ${{ steps.marvin-token.outputs.token }} + run: | + gh api "repos/${{ github.repository }}/issues/${{ github.event.issue.number }}/reactions" -f content=eyes 2>/dev/null || true + + - name: Run Claude for Triage + uses: ./.github/actions/run-claude + with: + claude-oauth-token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + github-token: ${{ steps.marvin-token.outputs.token }} + allowed-tools: "Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code" + prompt: | + + Repository: ${{ github.repository }} + Issue Number: #${{ github.event.issue.number }} + Issue Title: ${{ github.event.issue.title }} + Issue Author: ${{ github.event.issue.user.login }} + + + + ${{ github.event.issue.body }} + + + + Triage this new GitHub issue and provide a helpful, actionable response. You can write files and execute commands to test, verify, or investigate the issue. + + + + This workflow is for investigation, testing, and planning. + + You CANNOT: Create branches, checkout branches, commit code to the repository + Do not push changes to the repository. 
+ You CAN: Read/analyze code, search repository, review git history, search for similar issues, write files, verify behavior, provide analysis and recommendations + + + + You have access to the following tools (comma-separated list): + + Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code + + You can only use tools that are explicitly listed above. For Bash commands, the pattern `Bash(command:*)` means you can run that command with any arguments. If a command is not listed, it is not available. + + + + Use `mcp__agents-md-generator__generate_agents_md` to get repository context before triaging. + + + + - `mcp__public-code-search__search_code`: Search code in OTHER repositories (use `Grep`/`Read` for this repo) + - `WebSearch`: Search the web for documentation, best practices, or solutions + - `WebFetch`: Fetch and read content from URLs + - Git commands: You have access to git commands, but write commands (commit, push, checkout, branch creation) are blocked + - Write: You can write files (e.g., test files, temporary files for verification) + - Execution: See `` section above for exact list of available execution commands + + + + If execution commands are available (check `` section), you can: + - Run tests to verify reported bugs or test proposed solutions + - Execute scripts to understand behavior + - Run linters or static analysis tools + - Verify environment setup or dependencies + - Test specific code paths or scenarios + - Write test files to confirm behavior + + When executing commands: + - Explain what you're testing and why + - Include command output in your response when relevant + - Use execution to validate your findings and recommendations + - Only use commands that are explicitly listed in `` + + + + Your number 
one priority is to provide a great response to the issue. A great response is a response that is clear, concise, accurate, and actionable. You will avoid long paragraphs, flowery language, and overly verbose responses. Your readers have limited time and attention, so you will be concise and to the point. + + In priority order your goal is to: + 1. Provide context about the request or issue (related issues, pull requests, files, etc.) + 2. Layout a single high-quality and actionable recommendation for how to address the issue based on your knowledge of the project, codebase, and issue + 3. Provide a high quality and detailed plan that a junior developer could follow to implement the recommendation + 4. Use execution to verify findings when appropriate (check `` section for available commands) + + + + Populate the following sections in your response: + Recommendation (or "No recommendation" with reason) + Findings + Verification (if you executed tests or commands - check `` section) + Detailed Action Plan + Related Items + Related Files + Related Webpages + + You may not be able to do all of these things, sometimes you may find that all you can do is provide in-depth context of the issue and related items. That's perfectly acceptable and expected. Your performance is judged by how accurate your findings are, do the investigation required to have high confidence in your findings and recommendations. "I don't know" or "I'm unable to recommend a course of action" is better than a bad or wrong answer. + + When formulating your response, you will never "bury the lede", you will always provide a clear and concise tl;dr as the first thing in your response. As your response grows in length you can organize the more detailed parts of your response collapsible sections using
and tags. You shouldn't put everything in collapsible sections, especially if the response is short. Use your discretion to determine when to use collapsible sections to avoid overwhelming the reader with too much detail -- think of them like an appendix that can be expanded if the reader is interested. + + + + # Example output for "Recommendation" part of the response + PR #654 already implements the requested feature but is incomplete. The Pull Request is not in a mergeable state yet, the remaining work should be completed: 1) update the Calculator.divide method to utilize the new DivisionByZeroError or the safe_divide function, and 2) update the tests to ensure that the Calculator.divide method raises the new DivisionByZeroError when the divisor is 0. + +
+ Findings + ...details from the code analysis that are relevant to the issue and the recommendation... +
+ +
+ Verification + I ran the existing tests (if execution commands are available in ``) and confirmed the current behavior: + ```bash + $ pytest test_calculator.py::test_divide_by_zero + FAILED - raises ValueError instead of DivisionByZeroError + ``` + This confirms the issue report is accurate. +
+ +
+ Detailed Action Plan + ...a detailed plan that a junior developer could follow to implement the recommendation... +
+ + # Example Output for "Related Items" part of the response + +
+ Related Issues and Pull Requests + + | Repository | Issue or PR | Relevance | + | --- | --- | --- | + | jlowin/fastmcp | [Add matrix operations support](https://github.com/jlowin/fastmcp/pull/680) | This pull request directly addresses the feature request for adding matrix operations to the calculator. | + | jlowin/fastmcp | [Add matrix operations support](https://github.com/jlowin/fastmcp/issues/681) | This issue directly addresses the feature request for adding matrix operations to the calculator. | +
+ +
+ Related Files + + | Repository | File | Relevance | Sections | + | --- | --- | --- | --- | + | modelcontextprotocol/python-sdk | [test_calculator.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/test_calculator.py) | This file contains the test cases for the Calculator class, including a test that specifically asserts a ValueError is raised for division by zero, confirming the current intended behavior. | [25-27](https://github.com/modelcontextprotocol/python-sdk/blob/main/test_calculator.py#L25-L27) | + | modelcontextprotocol/python-sdk | [calculator.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/calculator.py) | This file contains the implementation of the Calculator class, specifically the `divide` method which raises the ValueError when dividing by zero, matching the bug report. | [29-32](https://github.com/modelcontextprotocol/python-sdk/blob/main/calculator.py#L29-L32) | +
+ +
+ Related Webpages + + | Name | URL | Relevance | + | --- | --- | --- | + | Handling Division by Zero Best Practices | https://my-blog-about-division-by-zero.com/handling+division+by+zero+in+calculator | This webpage provides general best practices for handling division by zero in calculator applications and in Python, which is directly relevant to the issue and potential solutions. | +
+
+ + + Always end your comment with a new line, three dashes, and the footer message: + + + --- + Marvin Context Protocol | Type `/marvin` to interact further + + Give us feedback! React with πŸš€ if perfect, πŸ‘ if helpful, πŸ‘Ž if not. + + + + + When writing GitHub comments, wrap branch names, tags, or other @-references in backticks (e.g., `@main`, `@v1.0`) to avoid accidentally pinging users. Do not add backticks around terms that are already inside backticks or code blocks. + diff --git a/.github/workflows/marvin-comment-on-issue.yml b/.github/workflows/marvin-comment-on-issue.yml new file mode 100644 index 0000000000..1ecc918069 --- /dev/null +++ b/.github/workflows/marvin-comment-on-issue.yml @@ -0,0 +1,144 @@ +# Respond to /marvin mentions in issue comments (elastic mention-in-issue style) +# Calls run-claude directly + +name: Comment on Issue + +on: + issue_comment: + types: [created] + +permissions: + contents: write + issues: write + pull-requests: read + id-token: write + +jobs: + comment: + if: | + !github.event.issue.pull_request && + contains(github.event.comment.body, '/marvin') && + contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association) + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Install UV + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Install dependencies + run: uv sync --python 3.12 + + - name: Run prek + uses: j178/prek-action@v1 + env: + SKIP: no-commit-to-branch + + - name: Generate Marvin App token + id: marvin-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.MARVIN_APP_ID }} + private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} + + - name: React to comment with eyes + env: + GH_TOKEN: ${{ steps.marvin-token.outputs.token }} + run: | + gh api "repos/${{ github.repository }}/issues/comments/${{ github.event.comment.id 
}}/reactions" -f content=eyes 2>/dev/null || true + + - name: Run Claude for Issue Comment + uses: ./.github/actions/run-claude + with: + claude-oauth-token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + github-token: ${{ steps.marvin-token.outputs.token }} + trigger-phrase: "/marvin" + allowed-bots: "*" + allowed-tools: "Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code" + prompt: | + + Repository: ${{ github.repository }} + Issue Number: #${{ github.event.issue.number }} + Issue Title: ${{ github.event.issue.title }} + Issue Author: ${{ github.event.issue.user.login }} + Comment Author: ${{ github.event.comment.user.login }} + + + + ${{ github.event.comment.body }} + + + + You have been mentioned in a GitHub issue comment. Understand the request, gather context, complete the task, and respond with results. + + + + This workflow allows read, write, and execute capabilities but cannot push changes. + + You CAN: Read/analyze code, modify files, write code, run tests, execute commands + You CANNOT: Commit code, push changes, create branches, checkout branches, create pull requests + + **Important**: You cannot push changes to the repository - you can only make changes locally and provide feedback or recommendations. + + + + You have access to the following tools (comma-separated list): + + Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code + + You can only use tools that are explicitly listed above. 
For Bash commands, the pattern `Bash(command:*)` means you can run that command with any arguments. If a command is not listed, it is not available. + + + + Use `mcp__agents-md-generator__generate_agents_md` to get repository context before responding. + + + + Be thorough in your investigations: + - Understand the full context of the repository + - Review related code, issues, and PRs + - Consider edge cases and implications + - Gather all relevant information before responding + + Available tools: + - `mcp__public-code-search__search_code`: Search code in OTHER repositories (use `Grep`/`Read` for this repo) + - `WebSearch`: Search the web for documentation, best practices, or solutions + - `WebFetch`: Fetch and read content from URLs + + + + - Answer questions about the codebase + - Help debug reported problems (make changes locally to test, cannot push) + - Suggest solutions or workarounds + - Provide code examples + - Help clarify requirements + - Link to relevant documentation or code + + + + - Be concise and actionable + - If the request is unclear, ask clarifying questions + - If the request requires actions you cannot perform (like pushing changes), explain what you can and cannot do + - When making code changes, explain that they are local only and cannot be pushed + + + + Always end your comment with a new line, three dashes, and the footer message: + + + --- + Marvin Context Protocol | Type `/marvin` to interact further + + Give us feedback! React with πŸš€ if perfect, πŸ‘ if helpful, πŸ‘Ž if not. + + + + + When writing GitHub comments, wrap branch names, tags, or other @-references in backticks (e.g., `@main`, `@v1.0`) to avoid accidentally pinging users. Do not add backticks around terms that are already inside backticks or code blocks. 
+ diff --git a/.github/workflows/marvin-comment-on-pr.yml b/.github/workflows/marvin-comment-on-pr.yml new file mode 100644 index 0000000000..09f699522c --- /dev/null +++ b/.github/workflows/marvin-comment-on-pr.yml @@ -0,0 +1,284 @@ +# Respond to /marvin mentions in PR review comments and issue comments on PRs +# Calls run-claude directly + +name: Comment on PR + +on: + issue_comment: + types: [created] + +permissions: + contents: write + pull-requests: write + issues: read + id-token: write + +jobs: + comment: + if: | + github.event.issue.pull_request && + contains(github.event.comment.body, '/marvin') && + contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association) + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: Checkout PR head branch + uses: actions/checkout@v6 + with: + # do not set to pull_request.head.ref, claude will pull the branch if needed + fetch-depth: 0 + + - name: Install UV + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Install dependencies + run: uv sync --python 3.12 + + - name: Run prek + uses: j178/prek-action@v1 + env: + SKIP: no-commit-to-branch + + - name: Generate Marvin App token + id: marvin-token + uses: actions/create-github-app-token@v2 + with: + app-id: ${{ secrets.MARVIN_APP_ID }} + private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} + + - name: React to comment with eyes + env: + GH_TOKEN: ${{ steps.marvin-token.outputs.token }} + run: | + gh api "repos/${{ github.repository }}/issues/comments/${{ github.event.comment.id }}/reactions" -f content=eyes 2>/dev/null || true + + - name: Get PR HEAD SHA + id: pr-info + env: + GH_TOKEN: ${{ steps.marvin-token.outputs.token }} + run: | + PR_NUMBER="${{ github.event.issue.number }}" + HEAD_SHA=$(gh api "repos/${{ github.repository }}/pulls/${PR_NUMBER}" --jq '.head.sha') + echo "head_sha=${HEAD_SHA}" >> "$GITHUB_OUTPUT" + echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT" + + - 
name: Run Claude for PR Comment + uses: ./.github/actions/run-claude + env: + MENTION_REPO: ${{ github.repository }} + MENTION_PR_NUMBER: ${{ steps.pr-info.outputs.pr_number }} + MENTION_SCRIPTS: ${{ github.workspace }}/.github/scripts/mention + PR_REVIEW_REPO: ${{ github.repository }} + PR_REVIEW_PR_NUMBER: ${{ steps.pr-info.outputs.pr_number }} + PR_REVIEW_HEAD_SHA: ${{ steps.pr-info.outputs.head_sha }} + PR_REVIEW_COMMENTS_DIR: /tmp/pr-review-comments + PR_REVIEW_HELPERS_DIR: ${{ github.workspace }}/.github/scripts/pr-review + with: + claude-oauth-token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }} + github-token: ${{ steps.marvin-token.outputs.token }} + trigger-phrase: "/marvin" + allowed-bots: "*" + allowed-tools: "Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code" + prompt: | + + Repository: ${{ github.repository }} + PR Number: #${{ steps.pr-info.outputs.pr_number }} + PR Title: ${{ github.event.issue.title }} + PR Author: ${{ github.event.issue.user.login }} + Comment Author: ${{ github.event.comment.user.login }} + + **Note**: The PR head branch has already been checked out. The workspace is ready - you can immediately start working on the PR code. + + + + ${{ github.event.comment.body }} + + + + You have been mentioned in a Pull Request comment. Understand the request, gather context, complete the task, and respond with results. + + + + This workflow allows read, write, and execute capabilities but cannot push changes. 
+ + You CAN: Read/analyze code, modify files, write code, run tests, execute commands, resolve review threads + You CANNOT: Commit code, push changes, create branches, checkout branches, create pull requests + + **Important**: You cannot push changes to the repository - you can only make changes locally and provide feedback or recommendations. + + + + You have access to the following tools (comma-separated list): + + Edit,MultiEdit,Glob,Grep,LS,Read,Write,WebSearch,WebFetch,mcp__github_comment__update_claude_comment,mcp__github_ci__get_ci_status,mcp__github_ci__get_workflow_run_details,mcp__github_ci__download_job_log,Bash(*),mcp__agents-md-generator__generate_agents_md,mcp__public-code-search__search_code + + You can only use tools that are explicitly listed above. For Bash commands, the pattern `Bash(command:*)` means you can run that command with any arguments. If a command is not listed, it is not available. + + + + Use `mcp__agents-md-generator__generate_agents_md` to get repository context before responding. + + + + Be thorough in your investigations: + - Understand the full context of the repository + - Review related code, issues, and PRs + - Consider edge cases and implications + - Gather all relevant information before responding + + Available tools: + - `mcp__public-code-search__search_code`: Search code in OTHER repositories (use `Grep`/`Read` for this repo) + - `WebSearch`: Search the web for documentation, best practices, or solutions + - `WebFetch`: Fetch and read content from URLs + + + + - Address review feedback and fix issues (make changes locally, cannot push) + - Answer questions about the changes + - Make additional code changes (local only) + - Resolve review threads after addressing feedback (if changes are made separately) + - Perform PR reviews when asked (use the PR review process below) + + + + When asked to review this PR, follow this structured review process. 
+ The `$PR_REVIEW_HELPERS_DIR` environment variable is pre-configured for all scripts below. + + + Follow these steps in order: + + **Step 1: Gather context** + - Use `mcp__agents-md-generator__generate_agents_md` to get repository context + (if this fails, explore the repository to understand the codebase β€” read key files like README, CONTRIBUTING, etc.) + - Run `$PR_REVIEW_HELPERS_DIR/pr-existing-comments.sh --summary` to see existing review threads per file + - Run `$PR_REVIEW_HELPERS_DIR/pr-diff.sh` to see changed files with line-numbered diffs + (for large PRs, this lists files only β€” review each with `pr-diff.sh `) + + **Step 2: Review each file** + For each changed file: + a. If the summary showed existing threads for this file, first run: + `$PR_REVIEW_HELPERS_DIR/pr-existing-comments.sh --file ` + Read the full thread details. The output uses these conventions: + - `← has replies` β€” a conversation happened; read carefully before commenting + - `[truncated]` β€” comment was cut short; add `--full` if you need the complete text to understand the comment + - `[abc1234]` β€” commit the comment was made on; use `git show abc1234` if needed + - `~42` β€” approximate line from an older revision (exact line no longer maps to current diff) + b. Review the diff. Use `Read` to see full file contents when you need more context. + Identify issues matching review_criteria. Do NOT flag: + - Issues in unchanged code (only review the diff) + - Style preferences handled by linters + - Pre-existing issues not introduced by this PR + - Issues already covered by existing threads (see below) + + **Existing thread rules** (check BEFORE leaving any comment): + - Resolved with reviewer reply β†’ reviewer's decision is final. Do NOT re-flag. + Examples: "It should remain as X", "This is intentional", "No need to do this change" + - Resolved without reply β†’ author likely fixed it. Do NOT re-raise unless the fix introduced a new problem. + - Unresolved β†’ already flagged. 
Do NOT re-comment. Mention in review body if you have more to add. + - Outdated β†’ code changed. Only re-flag if the issue still applies to the current diff. + When in doubt, do not duplicate. Redundant comments erode trust in the review process. + + **Step 3: Leave comments for NEW issues only** + For each genuinely new issue not covered by existing threads: + ```bash + $PR_REVIEW_HELPERS_DIR/pr-comment.sh \ + --severity \ + --title "Brief description" \ + --why "Risk or impact" <<'EOF' + corrected code here + EOF + ``` + Always provide suggestion code. Use `--no-suggestion` only when the fix requires + changes across multiple locations. Broader architectural concerns belong in the + review body, not inline comments. + + To remove a queued comment: `$PR_REVIEW_HELPERS_DIR/pr-remove-comment.sh ` + + **Step 4: Submit the review** + ```bash + $PR_REVIEW_HELPERS_DIR/pr-review.sh "" + ``` + - REQUEST_CHANGES: Any πŸ”΄ CRITICAL or 🟠 HIGH issues found + - COMMENT: 🟑 MEDIUM issues found (but no critical/high) + - APPROVE: No issues, or only βšͺ LOW / πŸ’¬ NITPICK suggestions + + The review body should include broader architectural concerns not suited for inline comments. + Avoid summarizing the PR or offering praise. If approving with no issues, omit the review body. + A standard footer is automatically appended to all comments and reviews. + + + + πŸ”΄ CRITICAL - Must fix before merge (security vulnerabilities, data corruption, production-breaking bugs) + 🟠 HIGH - Should fix before merge (logic errors, missing validation, significant performance issues) + 🟑 MEDIUM - Address soon, non-blocking (error handling gaps, suboptimal patterns, missing edge cases) + βšͺ LOW - Author discretion, non-blocking (minor improvements, documentation, style not covered by linters) + πŸ’¬ NITPICK - Truly optional (stylistic preferences, alternative approaches β€” safe to ignore) + + + + Focus on these categories, in priority order: + 1. 
Security vulnerabilities (injection, XSS, auth bypass, secrets exposure) + 2. Logic bugs that could cause runtime failures or incorrect behavior + 3. Data integrity issues (race conditions, missing transactions, corruption risk) + 4. Performance bottlenecks (N+1 queries, memory leaks, blocking operations) + 5. Error handling gaps (unhandled exceptions, missing validation) + 6. Breaking changes to public APIs without migration path + 7. Missing or incorrect test coverage for critical paths + + + + + View unresolved review threads: + ```bash + $MENTION_SCRIPTS/gh-get-review-threads.sh + ``` + + Filter for unresolved threads from a specific reviewer: + ```bash + $MENTION_SCRIPTS/gh-get-review-threads.sh "reviewer-username" + ``` + + Resolve a review thread after addressing feedback: + ```bash + $MENTION_SCRIPTS/gh-resolve-review-thread.sh "THREAD_ID" "Fixed by updating the error handling" + ``` + - `THREAD_ID` is the GraphQL node ID from the review threads output (e.g., `PRRT_kwDOABC123`) + - The comment is optional - use it to explain what you did + + Note: Since you cannot push changes, you can resolve threads to acknowledge feedback, but actual fixes would need to be applied separately. + + + + - Be concise and actionable + - If the request is unclear, ask clarifying questions + - If the request requires actions you cannot perform (like pushing changes), explain what you can and cannot do + - When making code changes, explain that they are local only and cannot be pushed + + **When performing a PR review**: Your substantive feedback belongs in the PR review submission + (via pr-review.sh), not in the comment response. The comment should only report: + - That you've submitted the review (with the outcome: approved, requested changes, etc.) + - Any issues encountered during the review process + - Brief status updates + + Do NOT duplicate the review content in your comment - the review itself contains all the details. 
+ Keep the comment short, e.g., "I've submitted my review requesting changes. See the review for details." + + + + Always end your comment with a new line, three dashes, and the footer message: + + + --- + Marvin Context Protocol | Type `/marvin` to interact further + + Give us feedback! React with πŸš€ if perfect, πŸ‘ if helpful, πŸ‘Ž if not. + + + + + When writing GitHub comments, wrap branch names, tags, or other @-references in backticks (e.g., `@main`, `@v1.0`) to avoid accidentally pinging users. Do not add backticks around terms that are already inside backticks or code blocks. + diff --git a/.github/workflows/marvin.yml b/.github/workflows/marvin.yml deleted file mode 100644 index ba394207d6..0000000000 --- a/.github/workflows/marvin.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Marvin Context Protocol - -on: - issue_comment: { types: [created] } - pull_request_review_comment: { types: [created] } - pull_request_review: { types: [submitted] } - pull_request: { types: [opened, edited] } - issues: { types: [opened, edited, assigned, labeled] } - discussion: { types: [created, edited, labeled] } - discussion_comment: { types: [created] } - -permissions: - contents: write - issues: write - pull-requests: write - discussions: write - actions: read - id-token: write - -jobs: - marvin: - # Restrict all triggers to repo members (OWNER, MEMBER, COLLABORATOR) - if: | - ( - (github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion_comment') && - contains(github.event.comment.body, '/marvin') && - contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association) - ) || - (github.event_name == 'pull_request_review' && contains(github.event.review.body, '/marvin') && contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.review.author_association)) || - (github.event_name == 'pull_request' && contains(github.event.pull_request.body, '/marvin') && 
contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.pull_request.author_association)) || - (github.event_name == 'issues' && contains(github.event.issue.body, '/marvin') && contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.issue.author_association)) || - (github.event_name == 'discussion' && contains(github.event.discussion.body, '/marvin') && contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.discussion.author_association)) || - ( - github.event_name == 'issues' && - ((github.event.action == 'assigned' && github.event.assignee.login == 'Marvin Context Protocol') || (github.event.action == 'labeled' && github.event.label.name == 'marvin')) && - contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.sender.author_association) - ) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6 - - # Install UV package manager - - name: Install UV - uses: astral-sh/setup-uv@v7 - with: - enable-cache: true - cache-dependency-glob: "uv.lock" - - # Install project dependencies - - name: Install dependencies - run: uv sync --python 3.12 - - - name: Run prek - uses: j178/prek-action@v1 - env: - SKIP: no-commit-to-branch - - - name: Generate Marvin App token - id: marvin-token - uses: actions/create-github-app-token@v2 - with: - app-id: ${{ secrets.MARVIN_APP_ID }} - private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} - - - name: Clean up stale Claude locks - run: rm -rf ~/.claude/.locks ~/.local/state/claude/locks || true - - # Marvin Assistant - - name: Run Marvin - uses: anthropics/claude-code-action@v1 - with: - github_token: ${{ steps.marvin-token.outputs.token }} - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - trigger_phrase: "/marvin" - allowed_bots: "*" - claude_args: | - --allowedTools 
WebSearch,WebFetch,Bash(uv:*),Bash(pre-commit:*),Bash(prek:*),Bash(pytest:*),Bash(ruff:*),Bash(ty:*),Bash(git:*),Bash(gh:*),mcp__github__add_issue_comment,mcp__github__create_issue,mcp__github__get_issue,mcp__github__list_issues,mcp__github__search_issues,mcp__github__update_issue,mcp__github__update_issue_comment,mcp__github__create_pull_request,mcp__github__get_pull_request,mcp__github__get_pull_request_comments,mcp__github__get_pull_request_files,mcp__github__get_pull_request_reviews,mcp__github__get_pull_request_status,mcp__github__list_pull_requests,mcp__github__update_pull_request,mcp__github__update_pull_request_branch,mcp__github__update_pull_request_comment,mcp__github__merge_pull_request - additional_permissions: | - actions: read - settings: | - { - "model": "claude-sonnet-4-5-20250929", - "env": { - "GH_TOKEN": "${{ steps.marvin-token.outputs.token }}" - }, - "customInstructions": "When you complete work on an issue: (1) You MUST create a pull request using the mcp__github__create_pull_request tool instead of posting a link, and (2) You MUST add the 'marvin-pr' label to the original issue using mcp__github__update_issue. Even if PR creation fails and you post a link instead, you MUST still add the 'marvin-pr' label. Follow the PR message guidelines in CLAUDE.md." - } diff --git a/.github/workflows/update-config-schema.yml b/.github/workflows/update-config-schema.yml index 06f63234a1..e5d0509f45 100644 --- a/.github/workflows/update-config-schema.yml +++ b/.github/workflows/update-config-schema.yml @@ -1,28 +1,28 @@ name: Update MCPServerConfig Schema -# This workflow runs on merges to main to automatically update the config schema -# by creating a PR when changes are needed. +# Regenerates config schema on PRs and commits it back to the branch, +# so the PR is self-contained and main is correct after merge. 
on: - push: + pull_request: branches: ["main"] paths: - "src/fastmcp/utilities/mcp_server_config/**" - - "!src/fastmcp/utilities/mcp_server_config/v1/schema.json" # Exclude the local schema file + - "!src/fastmcp/utilities/mcp_server_config/v1/schema.json" workflow_dispatch: permissions: contents: write - pull-requests: write jobs: update-config-schema: timeout-minutes: 5 runs-on: ubuntu-latest + if: >- + github.event_name == 'workflow_dispatch' || + github.event.pull_request.head.repo.full_name == github.repository steps: - - uses: actions/checkout@v6 - - name: Generate Marvin App token id: marvin-token uses: actions/create-github-app-token@v2 @@ -30,6 +30,11 @@ jobs: app-id: ${{ secrets.MARVIN_APP_ID }} private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} + - uses: actions/checkout@v6 + with: + ref: ${{ github.head_ref || github.ref }} + token: ${{ steps.marvin-token.outputs.token }} + - name: Install uv uses: astral-sh/setup-uv@v7 with: @@ -41,51 +46,22 @@ jobs: - name: Generate config schema run: | - echo "πŸ”„ Generating fastmcp.json schema..." 
- - # Generate schema in docs/public for web access uv run python -c " from fastmcp.utilities.mcp_server_config import generate_schema generate_schema('docs/public/schemas/fastmcp.json/latest.json') - print('βœ… Latest schema generated in docs/public') - " - - # Also update the v1 schema in docs/public - uv run python -c " - from fastmcp.utilities.mcp_server_config import generate_schema generate_schema('docs/public/schemas/fastmcp.json/v1.json') - print('βœ… v1 schema generated in docs/public') - " - - # Generate schema in the source directory for local development - uv run python -c " - from fastmcp.utilities.mcp_server_config import generate_schema generate_schema('src/fastmcp/utilities/mcp_server_config/v1/schema.json') - print('βœ… Schema generated in utilities/mcp_server_config/v1/') " - - name: Create Pull Request - uses: peter-evans/create-pull-request@v8 - with: - token: ${{ steps.marvin-token.outputs.token }} - commit-message: "chore: Update fastmcp.json schema" - title: "chore: Update fastmcp.json schema" - body: | - This PR updates the fastmcp.json schema files to match the current source code. - - The schema is automatically generated from `src/fastmcp/utilities/mcp_server_config/` to ensure consistency. - - **Note:** This PR is fully automated and will update itself with any subsequent changes to the schema, or close automatically if the schema becomes up-to-date through other means. Feel free to leave it open until you're ready to merge. 
- - πŸ€– Generated by Marvin - branch: marvin/update-config-schema - labels: | - ignore in release notes - delete-branch: true - author: "marvin-context-protocol[bot] <225465937+marvin-context-protocol[bot]@users.noreply.github.com>" - committer: "marvin-context-protocol[bot] <225465937+marvin-context-protocol[bot]@users.noreply.github.com>" - - - name: Summary + - name: Commit and push if changed run: | - echo "βœ… Config schema generation workflow completed" - echo "PR will be created if there are changes, or closed if schema is already up to date" + git config user.name "marvin-context-protocol[bot]" + git config user.email "225465937+marvin-context-protocol[bot]@users.noreply.github.com" + git add docs/public/schemas/ src/fastmcp/utilities/mcp_server_config/v1/schema.json + if git diff --cached --quiet; then + echo "Config schema is up to date" + else + git commit -m "chore: Update fastmcp.json schema" + git push + echo "Config schema updated and pushed" + fi diff --git a/.github/workflows/update-sdk-docs.yml b/.github/workflows/update-sdk-docs.yml index 10baa1e3f5..122f6ddfc9 100644 --- a/.github/workflows/update-sdk-docs.yml +++ b/.github/workflows/update-sdk-docs.yml @@ -1,10 +1,10 @@ name: Update SDK Documentation -# This workflow runs on merges to main to automatically update SDK docs -# by creating a PR when changes are needed. +# Regenerates SDK docs on PRs and commits them back to the branch, +# so the PR is self-contained and main is correct after merge. 
on: - push: + pull_request: branches: ["main"] paths: - "src/**" @@ -13,16 +13,16 @@ on: permissions: contents: write - pull-requests: write jobs: update-sdk-docs: timeout-minutes: 5 runs-on: ubuntu-latest + if: >- + github.event_name == 'workflow_dispatch' || + github.event.pull_request.head.repo.full_name == github.repository steps: - - uses: actions/checkout@v6 - - name: Generate Marvin App token id: marvin-token uses: actions/create-github-app-token@v2 @@ -30,6 +30,11 @@ jobs: app-id: ${{ secrets.MARVIN_APP_ID }} private-key: ${{ secrets.MARVIN_APP_PRIVATE_KEY }} + - uses: actions/checkout@v6 + with: + ref: ${{ github.head_ref || github.ref }} + token: ${{ steps.marvin-token.outputs.token }} + - name: Install uv uses: astral-sh/setup-uv@v7 with: @@ -43,32 +48,17 @@ jobs: uses: extractions/setup-just@v3 - name: Generate SDK documentation - run: | - echo "πŸ”„ Generating SDK documentation..." - just api-ref-all - - - name: Create Pull Request - uses: peter-evans/create-pull-request@v8 - with: - token: ${{ steps.marvin-token.outputs.token }} - commit-message: "chore: Update SDK documentation" - title: "chore: Update SDK documentation" - body: | - This PR updates the auto-generated SDK documentation to reflect the latest source code changes. - - πŸ“š Documentation is automatically generated from the source code docstrings and type annotations. - - **Note:** This PR is fully automated and will update itself with any subsequent changes to the SDK, or close automatically if the documentation becomes up-to-date through other means. Feel free to leave it open until you're ready to merge. 
- - πŸ€– Generated by Marvin - branch: marvin/update-sdk-docs - labels: | - ignore in release notes - delete-branch: true - author: "marvin-context-protocol[bot] <225465937+marvin-context-protocol[bot]@users.noreply.github.com>" - committer: "marvin-context-protocol[bot] <225465937+marvin-context-protocol[bot]@users.noreply.github.com>" + run: just api-ref-all - - name: Summary + - name: Commit and push if changed run: | - echo "βœ… SDK documentation generation workflow completed" - echo "PR will be created if there are changes, or closed if documentation is already up to date" + git config user.name "marvin-context-protocol[bot]" + git config user.email "225465937+marvin-context-protocol[bot]@users.noreply.github.com" + git add docs/python-sdk/ + if git diff --cached --quiet; then + echo "SDK documentation is up to date" + else + git commit -m "chore: Update SDK documentation" + git push + echo "SDK documentation updated and pushed" + fi diff --git a/AGENTS.md b/AGENTS.md index 1d6ff4e9b2..c726c0504d 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -96,7 +96,7 @@ When modifying MCP functionality, changes typically need to be applied across al - Uses Mintlify framework - Files must be in docs.json to be included -- Never modify `docs/python-sdk/**` (auto-generated) +- Do not manually modify `docs/python-sdk/**` β€” a bot automatically updates these files via commits added to PRs - **Core Principle:** A feature doesn't exist unless it is documented! ### Documentation Guidelines diff --git a/README.md b/README.md index 5892d00b2f..86882e3cfa 100644 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ These compose cleanly, so complex patterns don't require complex code. And becau ## Installation > [!Note] -> FastMCP 3.0 is currently in beta. Install with: `pip install fastmcp==3.0.0b2` +> FastMCP 3.0 is currently a release candidate. 
Install with: `pip install fastmcp==3.0.0rc1` > > For production systems requiring stability, pin to v2: `pip install 'fastmcp<3'` diff --git a/docs/changelog.mdx b/docs/changelog.mdx index de6afbe6a5..cf25fdfec4 100644 --- a/docs/changelog.mdx +++ b/docs/changelog.mdx @@ -4,6 +4,186 @@ icon: "list-check" rss: true --- + + +**[v3.0.0rc1: RC-ing is Believing](https://github.com/jlowin/fastmcp/releases/tag/v3.0.0rc1)** + +FastMCP 3 RC1 means we believe the API is stable. Beta 2 drew a wave of real-world adoption β€” production deployments, migration reports, integration testing β€” and the feedback overwhelmingly confirmed that the architecture works. This release closes gaps that surfaced under load: auth flows that needed to be async, background tasks that needed reliable notification delivery, and APIs still carrying beta-era naming. If nothing unexpected surfaces, this is what 3.0.0 looks like. + +🚨 **Breaking Changes** β€” The `ui=` parameter is now `app=` with a unified `AppConfig` class (matching the feature's actual name), and 16 `FastMCP()` constructor kwargs have finally been removed. If you've been ignoring months of deprecation warnings, you'll get a `TypeError` with specific migration instructions. + +πŸ” **Auth Improvements** β€” Three changes that together round out FastMCP's auth story for production. `auth=` checks can now be `async`, so you can hit databases or external services during authorization β€” previously, passing an async function silently passed because the unawaited coroutine was truthy. Static Client Registration lets clients provide a pre-registered `client_id`/`client_secret` directly, bypassing DCR for servers that don't support it. 
And Azure OBO flows are now declarative via dependency injection: + +```python +from fastmcp.server.auth.providers.azure import EntraOBOToken + +@mcp.tool() +async def get_emails( + graph_token: str = EntraOBOToken(["https://graph.microsoft.com/Mail.Read"]), +): + # OBO exchange already happened β€” just use the token + ... +``` + +⚑ **Concurrent Sampling** β€” When an LLM returns multiple tool calls in a single response, `context.sample()` can now execute them in parallel. Opt in with `tool_concurrency=0` for unlimited parallelism, or set a bound. Tools that aren't safe to parallelize can declare `sequential=True`. + +πŸ“‘ **Background Task Notifications** β€” Background tasks now reliably push progress updates and elicit user input through the standard MCP protocol. A distributed Redis queue replaces polling (7,200 round-trips/hour β†’ one blocking call), and `ctx.elicit()` in background tasks automatically relays through the client's standard `elicitation_handler`. + +βœ… **OpenAPI Output Validation** β€” When backends don't conform to their own OpenAPI schemas, the MCP SDK rejects the response and the tool fails. `validate_output=False` disables strict schema checking while still passing structured JSON to clients β€” a necessary escape hatch for imperfect APIs. 
+ +## What's Changed +### Enhancements πŸ”§ +* generate-cli: auto-generate SKILL.md agent skill by [@jlowin](https://github.com/jlowin) in [#3115](https://github.com/jlowin/fastmcp/pull/3115) +* Scope Martian triage to bug-labeled issues for jlowin by [@jlowin](https://github.com/jlowin) in [#3124](https://github.com/jlowin/fastmcp/pull/3124) +* Add Azure OBO dependencies, auth token injection, and documentation by [@jlowin](https://github.com/jlowin) in [#2918](https://github.com/jlowin/fastmcp/pull/2918) +* feat: add Static Client Registration (#3085) by [@martimfasantos](https://github.com/martimfasantos) in [#3086](https://github.com/jlowin/fastmcp/pull/3086) +* Add concurrent tool execution with sequential flag by [@strawgate](https://github.com/strawgate) in [#3022](https://github.com/jlowin/fastmcp/pull/3022) +* Add validate_output option for OpenAPI tools by [@jlowin](https://github.com/jlowin) in [#3134](https://github.com/jlowin/fastmcp/pull/3134) +* Relay task elicitation through standard MCP protocol by [@chrisguidry](https://github.com/chrisguidry) in [#3136](https://github.com/jlowin/fastmcp/pull/3136) +* Bump py-key-value-aio to `>=0.4.0,<0.5.0` by [@strawgate](https://github.com/strawgate) in [#3143](https://github.com/jlowin/fastmcp/pull/3143) +* Support async auth checks by [@jlowin](https://github.com/jlowin) in [#3152](https://github.com/jlowin/fastmcp/pull/3152) +* Make $ref dereferencing optional via FastMCP(dereference_refs=...) 
by [@jlowin](https://github.com/jlowin) in [#3151](https://github.com/jlowin/fastmcp/pull/3151) +* Expose local_provider property, deprecate FastMCP.remove_tool() by [@jlowin](https://github.com/jlowin) in [#3155](https://github.com/jlowin/fastmcp/pull/3155) +* Add helpers for converting FunctionTool and TransformedTool to SamplingTool by [@strawgate](https://github.com/strawgate) in [#3062](https://github.com/jlowin/fastmcp/pull/3062) +* Updates to github actions / workflows for claude by [@strawgate](https://github.com/strawgate) in [#3157](https://github.com/jlowin/fastmcp/pull/3157) +### Fixes 🐞 +* Updated deprecation URL for V3 by [@SrzStephen](https://github.com/SrzStephen) in [#3108](https://github.com/jlowin/fastmcp/pull/3108) +* Fix Windows test timeouts in OAuth proxy provider tests by [@strawgate](https://github.com/strawgate) in [#3123](https://github.com/jlowin/fastmcp/pull/3123) +* Fix session visibility marks leaking across sessions by [@jlowin](https://github.com/jlowin) in [#3132](https://github.com/jlowin/fastmcp/pull/3132) +* Fix unhandled exceptions in OpenAPI POST tool calls by [@jlowin](https://github.com/jlowin) in [#3133](https://github.com/jlowin/fastmcp/pull/3133) +* feat: distributed notification queue + BLPOP elicitation for background tasks by [@gfortaine](https://github.com/gfortaine) in [#2906](https://github.com/jlowin/fastmcp/pull/2906) +* fix: snapshot access token for background tasks (#3095) by [@gfortaine](https://github.com/gfortaine) in [#3138](https://github.com/jlowin/fastmcp/pull/3138) +* Stop duplicating path parameter descriptions into tool prose by [@jlowin](https://github.com/jlowin) in [#3149](https://github.com/jlowin/fastmcp/pull/3149) +* fix: guard client pagination loops against misbehaving servers by [@jlowin](https://github.com/jlowin) in [#3167](https://github.com/jlowin/fastmcp/pull/3167) +* Fix stale get_* references in docs and examples by [@jlowin](https://github.com/jlowin) in 
[#3168](https://github.com/jlowin/fastmcp/pull/3168) +* Support non-serializable values in Context.set_state by [@jlowin](https://github.com/jlowin) in [#3171](https://github.com/jlowin/fastmcp/pull/3171) +* Fix stale request context in StatefulProxyClient handlers by [@jlowin](https://github.com/jlowin) in [#3172](https://github.com/jlowin/fastmcp/pull/3172) +### Breaking Changes πŸ›« +* Rename ui= to app= and consolidate ToolUI/ResourceUI into AppConfig by [@jlowin](https://github.com/jlowin) in [#3117](https://github.com/jlowin/fastmcp/pull/3117) +* Remove deprecated FastMCP() constructor kwargs by [@jlowin](https://github.com/jlowin) in [#3148](https://github.com/jlowin/fastmcp/pull/3148) +### Docs πŸ“š +* Update docs to reference beta 2 by [@jlowin](https://github.com/jlowin) in [#3112](https://github.com/jlowin/fastmcp/pull/3112) +* docs: add pre-registered OAuth clients to v3-features by [@jlowin](https://github.com/jlowin) in [#3129](https://github.com/jlowin/fastmcp/pull/3129) +### Dependencies πŸ“¦ +* chore(deps): bump cryptography from 46.0.3 to 46.0.5 in /examples/testing_demo in the uv group across 1 directory by @dependabot in [#3140](https://github.com/jlowin/fastmcp/pull/3140) +### Other Changes 🦾 +* docs: add v3.0.0rc1 features to v3-features tracking by [@jlowin](https://github.com/jlowin) in [#3145](https://github.com/jlowin/fastmcp/pull/3145) +* docs: remove nonexistent MSALApp from rc1 notes by [@jlowin](https://github.com/jlowin) in [#3146](https://github.com/jlowin/fastmcp/pull/3146) + +## New Contributors +* [@martimfasantos](https://github.com/martimfasantos) made their first contribution in [#3086](https://github.com/jlowin/fastmcp/pull/3086) + +**Full Changelog**: https://github.com/jlowin/fastmcp/compare/v3.0.0b2...v3.0.0rc1 + + + + + +**[v3.0.0b2: 2 Fast 2 Beta](https://github.com/jlowin/fastmcp/releases/tag/v3.0.0b2)** + +FastMCP 3 Beta 2 reflects the huge number of people that kicked the tires on Beta 1. 
Seven new contributors landed changes in this release, and early migration reports went smoother than expected, including teams on Prefect Horizon upgrading from v2. Most of Beta 2 is refinement: fixing what people found, filling gaps from real usage, hardening edges. But a few new features did land along the way. + +πŸ–₯️ **Client CLI** β€” `fastmcp list`, `fastmcp call`, `fastmcp discover`, and `fastmcp generate-cli` turn any MCP server into something you can poke at from a terminal. Discover servers configured in Claude Desktop, Cursor, Goose, or project-level `mcp.json` files and reference them by name. `generate-cli` reads a server's schemas and writes a standalone typed CLI script where every tool is a proper subcommand with flags and help text. + +πŸ” **CIMD** (Client ID Metadata Documents) adds an alternative to Dynamic Client Registration for OAuth. Clients host a static JSON document at an HTTPS URL; that URL becomes the `client_id`. Server-side support includes SSRF-hardened fetching, cache-aware revalidation, and `private_key_jwt` validation. Enabled by default on `OAuthProxy`. + +πŸ“± **MCP Apps** β€” Spec-level compliance for the MCP Apps extension: `ui://` resource scheme, typed UI metadata on tools and resources, extension negotiation, and `ctx.client_supports_extension()` for runtime detection. + +⏳ **Background Task Context** β€” `Context` now works transparently in Docket workers. `ctx.elicit()` routes through Redis-based coordination so background tasks can pause for user input without any code changes. + +πŸ›‘οΈ **ResponseLimitingMiddleware** caps tool response sizes with UTF-8-safe truncation for text and schema-aware error handling for structured outputs. + +πŸͺΏ **Goose Integration** β€” `fastmcp install goose` generates deeplink URLs for one-command server installation into Goose. 
+ +## What's Changed +### New Features πŸŽ‰ +* Add MCP Apps Phase 1 β€” SDK compatibility (SEP-1865) by [@jlowin](https://github.com/jlowin) in [#3009](https://github.com/jlowin/fastmcp/pull/3009) +* Add `fastmcp list` and `fastmcp call` CLI commands by [@jlowin](https://github.com/jlowin) in [#3054](https://github.com/jlowin/fastmcp/pull/3054) +* Add `fastmcp generate-cli` command by [@jlowin](https://github.com/jlowin) in [#3065](https://github.com/jlowin/fastmcp/pull/3065) +* Add CIMD (Client ID Metadata Document) support for OAuth by [@jlowin](https://github.com/jlowin) in [#2871](https://github.com/jlowin/fastmcp/pull/2871) +### Enhancements πŸ”§ +* Make duplicate bot less aggressive by [@jlowin](https://github.com/jlowin) in [#2981](https://github.com/jlowin/fastmcp/pull/2981) +* Remove uv lockfile monitoring from Dependabot by [@jlowin](https://github.com/jlowin) in [#2986](https://github.com/jlowin/fastmcp/pull/2986) +* Run static checks with --upgrade, remove lockfile check by [@jlowin](https://github.com/jlowin) in [#2988](https://github.com/jlowin/fastmcp/pull/2988) +* Adjust workflow triggers for Marvin by [@strawgate](https://github.com/strawgate) in [#3010](https://github.com/jlowin/fastmcp/pull/3010) +* Move tests to a reusable action and enable nightly checks by [@strawgate](https://github.com/strawgate) in [#3017](https://github.com/jlowin/fastmcp/pull/3017) +* feat: option to add upstream claims to the FastMCP proxy JWT by [@JonasKs](https://github.com/JonasKs) in [#2997](https://github.com/jlowin/fastmcp/pull/2997) +* Fix ty 0.0.14 compatibility and upgrade dependencies by [@jlowin](https://github.com/jlowin) in [#3027](https://github.com/jlowin/fastmcp/pull/3027) +* fix: automatically include offline_access as a scope in the Azure provider to enable automatic token refreshing by [@JonasKs](https://github.com/JonasKs) in [#3001](https://github.com/jlowin/fastmcp/pull/3001) +* feat: expand --reload to watch frontend file types by 
[@jlowin](https://github.com/jlowin) in [#3028](https://github.com/jlowin/fastmcp/pull/3028) +* Add `fastmcp install stdio` command by [@jlowin](https://github.com/jlowin) in [#3032](https://github.com/jlowin/fastmcp/pull/3032) +* Update martian-issue-triage.yml for Workflow editing guidance by [@strawgate](https://github.com/strawgate) in [#3033](https://github.com/jlowin/fastmcp/pull/3033) +* feat: Goose integration + dedicated install command by [@jlowin](https://github.com/jlowin) in [#3040](https://github.com/jlowin/fastmcp/pull/3040) +* Fixing spelling issues in multiple files by [@didier-durand](https://github.com/didier-durand) in [#2996](https://github.com/jlowin/fastmcp/pull/2996) +* Add `fastmcp discover` and name-based server resolution by [@jlowin](https://github.com/jlowin) in [#3055](https://github.com/jlowin/fastmcp/pull/3055) +* feat(context): Add background task support for Context (SEP-1686) by [@gfortaine](https://github.com/gfortaine) in [#2905](https://github.com/jlowin/fastmcp/pull/2905) +* Add server version to banner by [@richardkmichael](https://github.com/richardkmichael) in [#3076](https://github.com/jlowin/fastmcp/pull/3076) +* Add @handle_tool_errors decorator for standardized error handling by [@dgenio](https://github.com/dgenio) in [#2885](https://github.com/jlowin/fastmcp/pull/2885) +* Update Anthropic and OpenAI clients to use Omit instead of NotGiven by [@jlowin](https://github.com/jlowin) in [#3088](https://github.com/jlowin/fastmcp/pull/3088) +* Add ResponseLimitingMiddleware for tool response size control by [@dgenio](https://github.com/dgenio) in [#3072](https://github.com/jlowin/fastmcp/pull/3072) +* Infer MIME types from OpenAPI response definitions by [@jlowin](https://github.com/jlowin) in [#3101](https://github.com/jlowin/fastmcp/pull/3101) +* Remove require_auth in favor of scope-based authorization by [@jlowin](https://github.com/jlowin) in [#3103](https://github.com/jlowin/fastmcp/pull/3103) +### Fixes 🐞 +* Fix FastAPI 
mounting examples in docs by [@jlowin](https://github.com/jlowin) in [#2962](https://github.com/jlowin/fastmcp/pull/2962) +* Remove outdated 'FastMCP 3.0 is coming!' CLI banner by [@jlowin](https://github.com/jlowin) in [#2974](https://github.com/jlowin/fastmcp/pull/2974) +* Pin httpx `< 1.0` and simplify beta install docs by [@jlowin](https://github.com/jlowin) in [#2975](https://github.com/jlowin/fastmcp/pull/2975) +* Add enabled field to ToolTransformConfig by [@jlowin](https://github.com/jlowin) in [#2991](https://github.com/jlowin/fastmcp/pull/2991) +* fix phue2 import in smart_home example by [@zzstoatzz](https://github.com/zzstoatzz) in [#2999](https://github.com/jlowin/fastmcp/pull/2999) +* fix: broaden combine_lifespans type to accept Mapping return types by [@aminsamir45](https://github.com/aminsamir45) in [#3005](https://github.com/jlowin/fastmcp/pull/3005) +* fix: type narrowing for skills resource contents by [@strawgate](https://github.com/strawgate) in [#3023](https://github.com/jlowin/fastmcp/pull/3023) +* fix: correctly send resource when exchanging code for the upstream by [@JonasKs](https://github.com/JonasKs) in [#3013](https://github.com/jlowin/fastmcp/pull/3013) +* MCP Apps: structured CSP/permissions types, resource meta propagation fix, QR example by [@jlowin](https://github.com/jlowin) in [#3031](https://github.com/jlowin/fastmcp/pull/3031) +* chore: upgrade python-multipart to 0.0.22 (CVE-2026-24486) by [@jlowin](https://github.com/jlowin) in [#3042](https://github.com/jlowin/fastmcp/pull/3042) +* chore: upgrade protobuf to 6.33.5 (CVE-2026-0994) by [@jlowin](https://github.com/jlowin) in [#3043](https://github.com/jlowin/fastmcp/pull/3043) +* fix: use MCP spec error code -32002 for resource not found by [@jlowin](https://github.com/jlowin) in [#3041](https://github.com/jlowin/fastmcp/pull/3041) +* Fix tool_choice reset for structured output sampling by [@strawgate](https://github.com/strawgate) in 
[#3014](https://github.com/jlowin/fastmcp/pull/3014) +* Fix workflow notification URL formatting in upgrade checks by [@strawgate](https://github.com/strawgate) in [#3047](https://github.com/jlowin/fastmcp/pull/3047) +* Fix Field() handling in prompts by [@strawgate](https://github.com/strawgate) in [#3050](https://github.com/jlowin/fastmcp/pull/3050) +* fix: use SkipJsonSchema to exclude callable fields from JSON schema generation by [@strawgate](https://github.com/strawgate) in [#3048](https://github.com/jlowin/fastmcp/pull/3048) +* fix: Preserve metadata in FastMCPProvider component wrappers by [@NeelayS](https://github.com/NeelayS) in [#3057](https://github.com/jlowin/fastmcp/pull/3057) +* Mock network calls in CLI tests and use MemoryStore for OAuth tests by [@strawgate](https://github.com/strawgate) in [#3051](https://github.com/jlowin/fastmcp/pull/3051) +* Remove OpenAPI timeout parameter, make client optional, surface timeout errors by [@jlowin](https://github.com/jlowin) in [#3067](https://github.com/jlowin/fastmcp/pull/3067) +* fix: enforce redirect URI validation when allowed_client_redirect_uris is supplied by [@nathanwelsh8](https://github.com/nathanwelsh8) in [#3066](https://github.com/jlowin/fastmcp/pull/3066) +* Fix --reload port conflict when using explicit port by [@jlowin](https://github.com/jlowin) in [#3070](https://github.com/jlowin/fastmcp/pull/3070) +* Fix compress_schema to preserve additionalProperties: false for MCP compatibility by [@jlowin](https://github.com/jlowin) in [#3102](https://github.com/jlowin/fastmcp/pull/3102) +* Fix CIMD redirect allowlist bypass and cache revalidation by [@jlowin](https://github.com/jlowin) in [#3098](https://github.com/jlowin/fastmcp/pull/3098) +* Exclude content-type from get_http_headers() to prevent HTTP 415 errors by [@jlowin](https://github.com/jlowin) in [#3104](https://github.com/jlowin/fastmcp/pull/3104) +### Docs πŸ“š +* Prepare docs for v3.0 beta release by [@jlowin](https://github.com/jlowin) 
in [#2954](https://github.com/jlowin/fastmcp/pull/2954) +* Restructure docs: move transforms to dedicated section by [@jlowin](https://github.com/jlowin) in [#2956](https://github.com/jlowin/fastmcp/pull/2956) +* Remove unnecessary pip warning by [@jlowin](https://github.com/jlowin) in [#2958](https://github.com/jlowin/fastmcp/pull/2958) +* Update example MCP version in installation docs by [@jlowin](https://github.com/jlowin) in [#2959](https://github.com/jlowin/fastmcp/pull/2959) +* Update brand images by [@jlowin](https://github.com/jlowin) in [#2960](https://github.com/jlowin/fastmcp/pull/2960) +* Restructure README and welcome page with motivated narrative by [@jlowin](https://github.com/jlowin) in [#2963](https://github.com/jlowin/fastmcp/pull/2963) +* Restructure README and docs with motivated narrative by [@jlowin](https://github.com/jlowin) in [#2964](https://github.com/jlowin/fastmcp/pull/2964) +* Favicon update and Prefect Horizon docs by [@jlowin](https://github.com/jlowin) in [#2978](https://github.com/jlowin/fastmcp/pull/2978) +* Add dependency injection documentation and DI-style dependencies by [@jlowin](https://github.com/jlowin) in [#2980](https://github.com/jlowin/fastmcp/pull/2980) +* docs: document expanded reload behavior and restructure beta sections by [@jlowin](https://github.com/jlowin) in [#3039](https://github.com/jlowin/fastmcp/pull/3039) +* Add output_schema caveat to response limiting docs by [@jlowin](https://github.com/jlowin) in [#3099](https://github.com/jlowin/fastmcp/pull/3099) +* Document token passthrough security in OAuth Proxy docs by [@jlowin](https://github.com/jlowin) in [#3100](https://github.com/jlowin/fastmcp/pull/3100) +### Dependencies πŸ“¦ +* Bump ty from 0.0.12 to 0.0.13 by @dependabot in [#2984](https://github.com/jlowin/fastmcp/pull/2984) +* Bump prek from 0.2.30 to 0.3.0 by @dependabot in [#2982](https://github.com/jlowin/fastmcp/pull/2982) +### Other Changes 🦾 +* Normalize resource URLs before comparison to 
support RFC 8707 query parameters by [@abhijeethp](https://github.com/abhijeethp) in [#2967](https://github.com/jlowin/fastmcp/pull/2967) +* Bump pydocket to 0.17.2 (memory leak fix) by [@chrisguidry](https://github.com/chrisguidry) in [#2998](https://github.com/jlowin/fastmcp/pull/2998) +* Add AzureJWTVerifier for Managed Identity token verification by [@jlowin](https://github.com/jlowin) in [#3058](https://github.com/jlowin/fastmcp/pull/3058) +* Add release notes for v2.14.4 and v2.14.5 by [@jlowin](https://github.com/jlowin) in [#3064](https://github.com/jlowin/fastmcp/pull/3064) +* Add missing beta2 features to v3 release tracking by [@jlowin](https://github.com/jlowin) in [#3105](https://github.com/jlowin/fastmcp/pull/3105) + +## New Contributors +* [@abhijeethp](https://github.com/abhijeethp) made their first contribution in [#2967](https://github.com/jlowin/fastmcp/pull/2967) +* [@aminsamir45](https://github.com/aminsamir45) made their first contribution in [#3005](https://github.com/jlowin/fastmcp/pull/3005) +* [@JonasKs](https://github.com/JonasKs) made their first contribution in [#2997](https://github.com/jlowin/fastmcp/pull/2997) +* [@NeelayS](https://github.com/NeelayS) made their first contribution in [#3057](https://github.com/jlowin/fastmcp/pull/3057) +* [@gfortaine](https://github.com/gfortaine) made their first contribution in [#2905](https://github.com/jlowin/fastmcp/pull/2905) +* [@nathanwelsh8](https://github.com/nathanwelsh8) made their first contribution in [#3066](https://github.com/jlowin/fastmcp/pull/3066) +* [@dgenio](https://github.com/dgenio) made their first contribution in [#2885](https://github.com/jlowin/fastmcp/pull/2885) + +**Full Changelog**: https://github.com/jlowin/fastmcp/compare/v3.0.0b1...v3.0.0b2 + + + **[v3.0.0b1: This Beta Work](https://github.com/jlowin/fastmcp/releases/tag/v3.0.0b1)** diff --git a/docs/clients/auth/oauth.mdx b/docs/clients/auth/oauth.mdx index 25804adc3f..84fbe2164d 100644 --- 
a/docs/clients/auth/oauth.mdx +++ b/docs/clients/auth/oauth.mdx @@ -55,6 +55,8 @@ You don't need to pass `mcp_url` when using `OAuth` with `Client(auth=...)` β€” - **`scopes`** (`str | list[str]`, optional): OAuth scopes to request. Can be space-separated string or list of strings - **`client_name`** (`str`, optional): Client name for dynamic registration. Defaults to `"FastMCP Client"` +- **`client_id`** (`str`, optional): Pre-registered OAuth client ID. When provided, skips Dynamic Client Registration entirely. See [Pre-Registered Clients](#pre-registered-clients) +- **`client_secret`** (`str`, optional): OAuth client secret for pre-registered clients. Optional β€” public clients that rely on PKCE can omit this - **`client_metadata_url`** (`str`, optional): URL-based client identity (CIMD). See [CIMD Authentication](/clients/auth/cimd) for details - **`token_storage`** (`AsyncKeyValue`, optional): Storage backend for persisting OAuth tokens. Defaults to in-memory storage (tokens lost on restart). See [Token Storage](#token-storage) for encrypted storage options - **`additional_client_metadata`** (`dict[str, Any]`, optional): Extra metadata for client registration @@ -74,7 +76,7 @@ The client first checks the configured `token_storage` backend for existing, val If no valid tokens exist, the client attempts to discover the OAuth server's endpoints using a well-known URI (e.g., `/.well-known/oauth-authorization-server`) based on the `mcp_url`. -If the OAuth server supports it and the client isn't already registered (or credentials aren't cached), the client performs dynamic client registration according to RFC 7591. Alternatively, if a `client_metadata_url` is configured and the server supports CIMD, the client uses its metadata URL as its identity instead of registering. +If a `client_id` is provided, the client uses those pre-registered credentials directly and skips this step entirely. 
Otherwise, if a `client_metadata_url` is configured and the server supports CIMD, the client uses its metadata URL as its identity. As a fallback, the client performs Dynamic Client Registration (RFC 7591) if the server supports it. A temporary local HTTP server is started on an available port (or the port specified via `callback_port`). This server's address (e.g., `http://127.0.0.1:/callback`) acts as the `redirect_uri` for the OAuth flow. @@ -152,3 +154,33 @@ async with Client( ``` See the [CIMD Authentication](/clients/auth/cimd) page for complete documentation on creating, hosting, and validating CIMD documents. + +## Pre-Registered Clients + + + +Some OAuth servers don't support Dynamic Client Registration β€” the MCP spec explicitly makes DCR optional. If your client has been pre-registered with the server (you already have a `client_id` and optionally a `client_secret`), you can provide them directly to skip DCR entirely. + +```python +from fastmcp import Client +from fastmcp.client.auth import OAuth + +async with Client( + "https://mcp-server.example.com/mcp", + auth=OAuth( + client_id="my-registered-client-id", + client_secret="my-client-secret", + ), +) as client: + await client.ping() +``` + +Public clients that rely on PKCE for security can omit `client_secret`: + +```python +oauth = OAuth(client_id="my-public-client-id") +``` + + +When using pre-registered credentials, the client will not attempt Dynamic Client Registration. If the server rejects the credentials, the error is surfaced immediately rather than falling back to DCR. 
+ diff --git a/docs/development/upgrade-guide.mdx b/docs/development/upgrade-guide.mdx index 8bfe713af2..70a60b8303 100644 --- a/docs/development/upgrade-guide.mdx +++ b/docs/development/upgrade-guide.mdx @@ -43,9 +43,9 @@ tool.disable() server.disable(names={"my_tool"}, components=["tool"]) ``` -#### Listing Methods Return Lists +#### Listing Methods Renamed and Return Lists -`get_tools()`, `get_resources()`, `get_prompts()`, and `get_resource_templates()` now return lists instead of dicts: +`get_tools()`, `get_resources()`, `get_prompts()`, and `get_resource_templates()` have been replaced by `list_tools()`, `list_resources()`, `list_prompts()`, and `list_resource_templates()`. The new methods return lists instead of dicts: ```python # Before @@ -53,7 +53,7 @@ tools = await server.get_tools() tool = tools["my_tool"] # After -tools = await server.get_tools() +tools = await server.list_tools() tool = next((t for t in tools if t.name == "my_tool"), None) ``` @@ -91,6 +91,20 @@ await ctx.set_state("key", "value") value = await ctx.get_state("key") ``` +#### State Values Must Be Serializable + +Session state values must now be JSON-serializable by default (dicts, lists, strings, numbers, etc.), since state is persisted across requests using a pluggable storage backend. + +If you need to store non-serializable values (e.g., passing an HTTP client from middleware to a tool), use `serializable=False`. These values are request-scoped and only available during the current tool call, resource read, or prompt render: + +```python +# Middleware sets up a client for the current request +await ctx.set_state("client", my_http_client, serializable=False) + +# Tool retrieves it in the same request +client = await ctx.get_state("client") +``` + #### Server Banner Environment Variable Renamed `FASTMCP_SHOW_CLI_BANNER` is now `FASTMCP_SHOW_SERVER_BANNER`. 
diff --git a/docs/development/v3-notes/v3-features.mdx b/docs/development/v3-notes/v3-features.mdx index 90d72f4165..ae6bfc27da 100644 --- a/docs/development/v3-notes/v3-features.mdx +++ b/docs/development/v3-notes/v3-features.mdx @@ -4,6 +4,128 @@ title: v3.0 Feature Tracking This document tracks major features in FastMCP v3.0 for release notes preparation. +## 3.0.0rc1 + +### SamplingTool Conversion Helpers + +Server tools (FunctionTool and TransformedTool) can now be passed directly to sampling methods via `SamplingTool.from_callable_tool()` ([#3062](https://github.com/jlowin/fastmcp/pull/3062)). Previously, tools defined with `@mcp.tool` had to be recreated as functions for use in `ctx.sample()`. Now `ctx.sample()` and `ctx.sample_step()` accept these tool instances directly. + +```python +@mcp.tool +def search(query: str) -> str: + """Search the web.""" + return do_search(query) + +# Use tool directly in sampling +result = await ctx.sample( + "Research Python frameworks", + tools=[search] # FunctionTool works directly! +) +``` + +### Concurrent Tool Execution in Sampling + +When an LLM returns multiple tool calls in a single sampling response, they can now be executed concurrently ([#3022](https://github.com/jlowin/fastmcp/pull/3022)). Default behavior remains sequential; opt in with `tool_concurrency`. Tools can declare `sequential=True` to force sequential execution even when concurrency is enabled. + +```python +result = await context.sample( + messages="Fetch weather for NYC and LA", + tools=[fetch_weather], + tool_concurrency=0, # Unlimited parallel execution +) +``` + +### OpenAPI `validate_output` Option + +`OpenAPIProvider` and `FastMCP.from_openapi()` now accept `validate_output=False` to skip output schema validation ([#3134](https://github.com/jlowin/fastmcp/pull/3134)). Useful when backends don't conform to their own OpenAPI response schemas β€” structured JSON still flows through, only the strict schema checking is disabled. 
+ +```python +mcp = FastMCP.from_openapi( + openapi_spec=spec, + client=client, + validate_output=False, +) +``` + +### Auth Token Injection and Azure OBO Dependencies + +New dependency injection for accessing the authenticated user's token directly in tool parameters ([#2918](https://github.com/jlowin/fastmcp/pull/2918)). Works with any auth provider. + +```python +from fastmcp.server.dependencies import CurrentAccessToken, TokenClaim +from fastmcp.server.auth import AccessToken + +@mcp.tool() +async def my_tool( + token: AccessToken = CurrentAccessToken, + user_id: str = TokenClaim("oid"), +): ... +``` + +For Azure/Entra, the new `fastmcp[azure]` extra adds `EntraOBOToken`, which handles the On-Behalf-Of token exchange declaratively: + +```python +from fastmcp.server.auth.providers.azure import EntraOBOToken + +@mcp.tool() +async def get_emails( + graph_token: str = EntraOBOToken(["https://graph.microsoft.com/Mail.Read"]), +): + # graph_token is ready β€” OBO exchange happened automatically + ... +``` + +### `generate-cli` Agent Skill Generation + +`fastmcp generate-cli` now produces a `SKILL.md` alongside the CLI script ([#3115](https://github.com/jlowin/fastmcp/pull/3115)) β€” a Claude Code agent skill with pre-computed invocation syntax for every tool. Agents reading the skill can call tools immediately without running `--help`. On by default; pass `--no-skill` to opt out. + +### Background Task Notification Queue + +Background tasks now use a distributed Redis notification queue for reliable delivery ([#2906](https://github.com/jlowin/fastmcp/pull/2906)). Elicitation switches from polling to BLPOP (single blocking call instead of ~7,200 round-trips/hour), and notification delivery retries up to 3x with TTL-based expiration. 
+ +### Async Auth Checks + +Auth check functions can now be `async`, enabling authorization decisions that depend on asynchronous operations like reading server state via `Context.get_state` or calling external services ([#3150](https://github.com/jlowin/fastmcp/issues/3150)). Sync and async checks can be freely mixed. Previously, passing an async function as an auth check would silently pass (coroutine objects are truthy). + +### Optional `$ref` Dereferencing in Schemas + +Schema `$ref` dereferencing β€” which inlines all `$defs` for compatibility with MCP clients that don't handle `$ref` β€” is now controlled by the `dereference_schemas` constructor kwarg ([#3141](https://github.com/jlowin/fastmcp/issues/3141)). Default is `True` (dereference on) because the non-compliant clients are popular and the failure mode is silent breakage that server authors can't diagnose. Opt out when you know your clients handle `$ref` and want smaller schemas: + +```python +mcp = FastMCP("my-server", dereference_schemas=False) +``` + +Dereferencing is implemented as middleware (`DereferenceRefsMiddleware`) that runs at serve-time, so schemas are stored with `$ref` intact and only inlined when sent to clients. + +### Breaking: Deprecated `FastMCP()` Constructor Kwargs Removed + +Sixteen deprecated keyword arguments have been removed from `FastMCP.__init__`. Passing any of them now raises `TypeError` with a migration hint. Environment variables (e.g., `FASTMCP_HOST`) continue to work β€” only the constructor kwargs moved. + +**Transport/server settings** (`host`, `port`, `log_level`, `debug`, `sse_path`, `message_path`, `streamable_http_path`, `json_response`, `stateless_http`): Pass to `run()`, `run_http_async()`, or `http_app()` as appropriate, or set via environment variables. 
+ +```python +# Before +mcp = FastMCP("server", host="0.0.0.0", port=8080) +mcp.run() + +# After +mcp = FastMCP("server") +mcp.run(transport="http", host="0.0.0.0", port=8080) +``` + +**Duplicate handling** (`on_duplicate_tools`, `on_duplicate_resources`, `on_duplicate_prompts`): Use the unified `on_duplicate=` parameter. + +**Tag filtering** (`include_tags`, `exclude_tags`): Use `server.enable(tags=..., only=True)` and `server.disable(tags=...)` after construction. + +**Tool serializer** (`tool_serializer`): Return `ToolResult` from tools instead. + +**Tool transformations** (`tool_transformations`): Use `server.add_transform(ToolTransform(...))` after construction. + +The `_deprecated_settings` attribute and `.settings` property are also removed. `ExperimentalSettings` has been deleted (dead code). + +### Breaking: `ui=` Renamed to `app=` + +The MCP Apps decorator parameter has been renamed from `ui=ToolUI(...)` / `ui=ResourceUI(...)` to `app=AppConfig(...)` ([#3117](https://github.com/jlowin/fastmcp/pull/3117)). `ToolUI` and `ResourceUI` are consolidated into a single `AppConfig` class. Wire format is unchanged. See the MCP Apps section under beta2 for full details. ## 3.0.0beta2 ### CLI: `fastmcp list` and `fastmcp call` @@ -120,6 +242,29 @@ Key details: Documentation: [CIMD Authentication](/clients/auth/cimd), [OAuth Proxy CIMD config](/servers/auth/oauth-proxy#cimd-support) +### Pre-Registered OAuth Clients + +The `OAuth` client helper now accepts `client_id` and `client_secret` parameters for servers where the client is already registered ([#3086](https://github.com/jlowin/fastmcp/pull/3086)). This bypasses Dynamic Client Registration entirely β€” useful when DCR is disabled, or when the server has pre-provisioned credentials for your application. 
+ +```python +from fastmcp import Client +from fastmcp.client.auth import OAuth + +async with Client( + "https://mcp-server.example.com/mcp", + auth=OAuth( + client_id="my-registered-app", + client_secret="my-secret", + scopes=["read", "write"], + ), +) as client: + await client.ping() +``` + +The static credentials are injected before the OAuth flow begins, so the client never attempts DCR. If the server rejects the credentials, the error surfaces immediately rather than retrying with fresh registration (which can't help for fixed credentials). Public clients can omit `client_secret`. + +Documentation: [Pre-Registered Clients](/clients/auth/oauth#pre-registered-clients) + ### CLI: `fastmcp generate-cli` `fastmcp generate-cli` connects to any MCP server, reads its tool schemas, and writes a standalone Python CLI script where every tool becomes a typed subcommand with flags, help text, and tab completion ([#3065](https://github.com/jlowin/fastmcp/pull/3065)). The insight is that MCP tool schemas already contain everything a CLI framework needs β€” parameter names, types, descriptions, required/optional status β€” so the generator maps JSON Schema directly into [cyclopts](https://cyclopts.readthedocs.io/) commands. @@ -1177,35 +1322,9 @@ main.mount(subserver, prefix="api") main.mount(subserver, namespace="api") ``` -#### Tag Filtering Init Parameters - -`FastMCP(include_tags=..., exclude_tags=...)` deprecated. Use `enable()`/`disable()` methods: - -```python -# Deprecated -mcp = FastMCP("server", exclude_tags={"internal"}) +#### Tag Filtering, Tool Serializer, Tool Transformations Init Parameters -# New -mcp = FastMCP("server") -mcp.disable(tags={"internal"}) -``` - -#### Tool Serializer Parameter - -The `tool_serializer` parameter on `FastMCP` is deprecated. Return `ToolResult` for explicit serialization control. 
- -#### Tool Transformation Methods - -`add_tool_transformation()`, `remove_tool_transformation()`, and `tool_transformations` constructor parameter are deprecated. Use `add_transform(ToolTransform({...}))` instead: - -```python -# Deprecated -mcp.add_tool_transformation("name", config) - -# New -from fastmcp.server.transforms import ToolTransform -mcp.add_transform(ToolTransform({"name": config})) -``` +These constructor parameters have been **removed** (not just deprecated) as of rc1. See "Breaking: Deprecated `FastMCP()` Constructor Kwargs Removed" in the rc1 section above. The `add_tool_transformation()` and `remove_tool_transformation()` methods remain as deprecated shims. --- @@ -1257,7 +1376,8 @@ server.disable(names={"my_tool"}, components=["tool"]) Server lookup and listing methods have updated signatures: - Parameter names: `get_tool(name=...)`, `get_resource(uri=...)`, etc. (was `key`) -- Return types: `get_tools()`, `get_resources()`, etc. return lists instead of dicts +- Plural listing methods renamed: `get_tools()` β†’ `list_tools()`, `get_resources()` β†’ `list_resources()`, etc. +- Return types: `list_tools()`, `list_resources()`, etc. 
return lists instead of dicts ```python # v2.x @@ -1265,7 +1385,7 @@ tools = await server.get_tools() tool = tools["my_tool"] # v3.0 -tools = await server.get_tools() +tools = await server.list_tools() tool = next((t for t in tools if t.name == "my_tool"), None) ``` diff --git a/docs/docs.json b/docs/docs.json index 995cb146c3..fc5499d5a6 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -467,6 +467,7 @@ "python-sdk/fastmcp-server-middleware-__init__", "python-sdk/fastmcp-server-middleware-authorization", "python-sdk/fastmcp-server-middleware-caching", + "python-sdk/fastmcp-server-middleware-dereference", "python-sdk/fastmcp-server-middleware-error_handling", "python-sdk/fastmcp-server-middleware-logging", "python-sdk/fastmcp-server-middleware-middleware", @@ -562,6 +563,7 @@ "python-sdk/fastmcp-server-tasks-elicitation", "python-sdk/fastmcp-server-tasks-handlers", "python-sdk/fastmcp-server-tasks-keys", + "python-sdk/fastmcp-server-tasks-notifications", "python-sdk/fastmcp-server-tasks-requests", "python-sdk/fastmcp-server-tasks-routing", "python-sdk/fastmcp-server-tasks-subscriptions" @@ -665,7 +667,7 @@ "icon": "code" } ], - "version": "v3.0.0 (beta 2)" + "version": "v3.0.0 (rc 1)" }, { "dropdowns": [ @@ -862,7 +864,7 @@ "icon": "book" } ], - "version": "v2.14.3" + "version": "v2.14.5" } ] }, diff --git a/docs/getting-started/installation.mdx b/docs/getting-started/installation.mdx index 0a566bd685..ba1341ea15 100644 --- a/docs/getting-started/installation.mdx +++ b/docs/getting-started/installation.mdx @@ -8,17 +8,17 @@ icon: arrow-down-to-line We recommend using [uv](https://docs.astral.sh/uv/getting-started/installation/) to install and manage FastMCP. -FastMCP 3.0 is currently in beta. Package managers won't install beta versions by defaultβ€”you must explicitly request one (e.g., `>=3.0.0b2`). +FastMCP 3.0 is currently a release candidate. 
Package managers won't install pre-release versions by default—you must explicitly request one (e.g., `>=3.0.0rc1`). ```bash -pip install "fastmcp>=3.0.0b2" +pip install "fastmcp>=3.0.0rc1" ``` Or with uv: ```bash -uv add "fastmcp>=3.0.0b2" +uv add "fastmcp>=3.0.0rc1" ``` ### Optional Dependencies @@ -26,7 +26,7 @@ uv add "fastmcp>=3.0.0b2" FastMCP provides optional extras for specific features. For example, to install the background tasks extra: ```bash -pip install "fastmcp[tasks]==3.0.0b2" +pip install "fastmcp[tasks]==3.0.0rc1" ``` See [Background Tasks](/servers/tasks) for details on the task system. @@ -44,7 +44,7 @@ You should see output like the following: ```bash $ fastmcp version -FastMCP version: 3.0.0 +FastMCP version: 3.0.0rc1 MCP version: 1.25.0 Python version: 3.12.2 Platform: macOS-15.3.1-arm64-arm-64bit diff --git a/docs/getting-started/welcome.mdx b/docs/getting-started/welcome.mdx index cf19fe8f51..5b1fd66e28 100644 --- a/docs/getting-started/welcome.mdx +++ b/docs/getting-started/welcome.mdx @@ -36,7 +36,7 @@ if __name__ == "__main__": ``` -**This documentation is for FastMCP 3.0**, which is currently in beta. For the 2.x release, see the [FastMCP 2.0 documentation](/v2/getting-started/welcome). +**This documentation is for FastMCP 3.0**, which is currently a release candidate. For the 2.x release, see the [FastMCP 2.0 documentation](/v2/getting-started/welcome). FastMCP is made with 💙 by [Prefect](https://www.prefect.io/). diff --git a/docs/integrations/azure.mdx b/docs/integrations/azure.mdx index a5b316d881..4376a38ce0 100644 --- a/docs/integrations/azure.mdx +++ b/docs/integrations/azure.mdx @@ -326,3 +326,135 @@ mcp = FastMCP(name="Azure MI App", auth=auth) For Azure Government, pass `base_authority="login.microsoftonline.us"` to `AzureJWTVerifier`. + +## On-Behalf-Of (OBO) + + + +The On-Behalf-Of (OBO) flow allows your FastMCP server to call downstream Microsoft APIs—like Microsoft Graph—using the authenticated user's identity. 
When a user authenticates to your MCP server, you receive a token for your API. OBO exchanges that token for a new token that can call other services, maintaining the user's identity and permissions throughout the chain. + +This pattern is useful when your tools need to access user-specific data from Microsoft services: reading emails, accessing calendar events, querying SharePoint, or any other Graph API operation that requires user context. + + +OBO features require the `azure` extra: + +```bash +pip install 'fastmcp[azure]' +``` + + +### Azure Portal Setup + +OBO requires additional configuration in your Azure App registration beyond basic authentication. + + + + In your App registration, navigate to **API permissions** and add the Microsoft Graph permissions your tools will need. + + - Click **Add a permission** → **Microsoft Graph** → **Delegated permissions** + - Select the permissions required for your use case (e.g., `Mail.Read`, `Calendars.Read`, `User.Read`) + - Repeat for any other APIs you need to call + + + Only add delegated permissions for OBO. Application permissions bypass user context entirely and are inappropriate for the OBO flow. + + + + + OBO requires admin consent for the permissions you've added. In the **API permissions** page, click **Grant admin consent for [Your Organization]**. + + Without admin consent, OBO token exchanges will fail with an `AADSTS65001` error indicating the user or administrator hasn't consented to use the application. + + + For development, you can grant consent for just your own account. For production, an Azure AD administrator must grant tenant-wide consent. + + + + +### Configure AzureProvider for OBO + +The `additional_authorize_scopes` parameter tells Azure which downstream API permissions to include during the initial authorization. These scopes establish what your server can request through OBO later. 
+ +```python server.py +from fastmcp import FastMCP +from fastmcp.server.auth.providers.azure import AzureProvider + +auth_provider = AzureProvider( + client_id="your-client-id", + client_secret="your-client-secret", + tenant_id="your-tenant-id", + base_url="http://localhost:8000", + required_scopes=["mcp-access"], # Your API scope + # Include Graph scopes for OBO + additional_authorize_scopes=[ + "https://graph.microsoft.com/Mail.Read", + "https://graph.microsoft.com/User.Read", + "offline_access", # Enables refresh tokens + ], +) + +mcp = FastMCP(name="Graph-Enabled Server", auth=auth_provider) +``` + +Scopes listed in `additional_authorize_scopes` are requested during the initial OAuth flow but aren't validated on incoming tokens. They establish permission for your server to later exchange the user's token for downstream API access. + + +Use fully-qualified scope URIs for downstream APIs (e.g., `https://graph.microsoft.com/Mail.Read`). Short forms like `Mail.Read` work for authorization requests, but fully-qualified URIs are clearer and avoid ambiguity. + + +### EntraOBOToken Dependency + +The `EntraOBOToken` dependency handles the complete OBO flow automatically. Declare it as a parameter default with the scopes you need, and FastMCP exchanges the user's token for a downstream API token before your function runs. 
+ +```python +from fastmcp import FastMCP +from fastmcp.server.auth.providers.azure import AzureProvider, EntraOBOToken +import httpx + +auth_provider = AzureProvider( + client_id="your-client-id", + client_secret="your-client-secret", + tenant_id="your-tenant-id", + base_url="http://localhost:8000", + required_scopes=["mcp-access"], + additional_authorize_scopes=[ + "https://graph.microsoft.com/Mail.Read", + "https://graph.microsoft.com/User.Read", + ], +) + +mcp = FastMCP(name="Email Reader", auth=auth_provider) + +@mcp.tool +async def get_recent_emails( + count: int = 10, + graph_token: str = EntraOBOToken(["https://graph.microsoft.com/Mail.Read"]), +) -> list[dict]: + """Get the user's recent emails from Microsoft Graph.""" + async with httpx.AsyncClient() as client: + response = await client.get( + f"https://graph.microsoft.com/v1.0/me/messages?$top={count}", + headers={"Authorization": f"Bearer {graph_token}"}, + ) + response.raise_for_status() + data = response.json() + + return [ + {"subject": msg["subject"], "from": msg["from"]["emailAddress"]["address"]} + for msg in data.get("value", []) + ] +``` + +The `graph_token` parameter receives a ready-to-use access token for Microsoft Graph. FastMCP handles the OBO exchange transparentlyβ€”your function just uses the token to call the API. + + +**Scope alignment is critical.** The scopes passed to `EntraOBOToken` must be a subset of the scopes in `additional_authorize_scopes`. If you request a scope during OBO that wasn't included in the initial authorization, the exchange will fail. + + + +For advanced OBO scenarios, use `CurrentAccessToken()` to get the user's token, then construct an `azure.identity.aio.OnBehalfOfCredential` directly with your Azure credentials. + + + +For a complete working example of Azure OBO with FastMCP, see [Pamela Fox's blog post on OBO flow for Entra-based MCP servers](https://blog.pamelafox.org/2026/01/using-on-behalf-of-flow-for-entra-based.html). 
+ diff --git a/docs/python-sdk/fastmcp-cli-generate.mdx b/docs/python-sdk/fastmcp-cli-generate.mdx index 28bd3ea7f7..027894e384 100644 --- a/docs/python-sdk/fastmcp-cli-generate.mdx +++ b/docs/python-sdk/fastmcp-cli-generate.mdx @@ -6,7 +6,7 @@ sidebarTitle: generate # `fastmcp.cli.generate` -Generate a standalone CLI script from an MCP server's capabilities. +Generate a standalone CLI script and agent skill from an MCP server. ## Functions @@ -33,7 +33,17 @@ generate_cli_script(server_name: str, server_spec: str, transport_code: str, ext Generate the full CLI script source code. -### `generate_cli_command` +### `generate_skill_content` + +```python +generate_skill_content(server_name: str, cli_filename: str, tools: list[mcp.types.Tool]) -> str +``` + + +Generate a SKILL.md file for a generated CLI script. + + +### `generate_cli_command` ```python generate_cli_command(server_spec: Annotated[str, cyclopts.Parameter(help='Server URL, Python file, MCPConfig JSON, discovered name, or .js file')], output: Annotated[str, cyclopts.Parameter(help='Output file path (default: cli.py)')] = 'cli.py') -> None @@ -43,7 +53,8 @@ generate_cli_command(server_spec: Annotated[str, cyclopts.Parameter(help='Server Generate a standalone CLI script from an MCP server. Connects to the server, reads its tools/resources/prompts, and writes -a Python script that can invoke them directly. +a Python script that can invoke them directly. Also generates a SKILL.md +agent skill file unless --no-skill is passed. 
**Examples:** @@ -51,4 +62,5 @@ fastmcp generate-cli weather fastmcp generate-cli weather my_cli.py fastmcp generate-cli http://localhost:8000/mcp fastmcp generate-cli server.py output.py -f +fastmcp generate-cli weather --no-skill diff --git a/docs/python-sdk/fastmcp-client-auth-oauth.mdx b/docs/python-sdk/fastmcp-client-auth-oauth.mdx index b9d06beb7c..15ce6e8af1 100644 --- a/docs/python-sdk/fastmcp-client-auth-oauth.mdx +++ b/docs/python-sdk/fastmcp-client-auth-oauth.mdx @@ -73,7 +73,7 @@ a browser for user authorization and running a local callback server. **Methods:** -#### `redirect_handler` +#### `redirect_handler` ```python redirect_handler(self, authorization_url: str) -> None @@ -82,7 +82,7 @@ redirect_handler(self, authorization_url: str) -> None Open browser for authorization, with pre-flight check for invalid client. -#### `callback_handler` +#### `callback_handler` ```python callback_handler(self) -> tuple[str, str | None] @@ -91,7 +91,7 @@ callback_handler(self) -> tuple[str, str | None] Handle OAuth callback and return (auth_code, state). -#### `async_auth_flow` +#### `async_auth_flow` ```python async_auth_flow(self, request: httpx.Request) -> AsyncGenerator[httpx.Request, httpx.Response] diff --git a/docs/python-sdk/fastmcp-client-mixins-prompts.mdx b/docs/python-sdk/fastmcp-client-mixins-prompts.mdx index 7e9c8e8f6f..bf5303caf0 100644 --- a/docs/python-sdk/fastmcp-client-mixins-prompts.mdx +++ b/docs/python-sdk/fastmcp-client-mixins-prompts.mdx @@ -58,7 +58,7 @@ large result sets incrementally), use list_prompts_mcp() with the cursor paramet - `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `get_prompt_mcp` +#### `get_prompt_mcp` ```python get_prompt_mcp(self: Client, name: str, arguments: dict[str, Any] | None = None, meta: dict[str, Any] | None = None) -> mcp.types.GetPromptResult @@ -80,19 +80,19 @@ containing the prompt messages and any additional metadata. 
- `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `get_prompt` +#### `get_prompt` ```python get_prompt(self: Client, name: str, arguments: dict[str, Any] | None = None) -> mcp.types.GetPromptResult ``` -#### `get_prompt` +#### `get_prompt` ```python get_prompt(self: Client, name: str, arguments: dict[str, Any] | None = None) -> PromptTask ``` -#### `get_prompt` +#### `get_prompt` ```python get_prompt(self: Client, name: str, arguments: dict[str, Any] | None = None) -> mcp.types.GetPromptResult | PromptTask diff --git a/docs/python-sdk/fastmcp-client-mixins-resources.mdx b/docs/python-sdk/fastmcp-client-mixins-resources.mdx index ab89913e98..64aaf5e04c 100644 --- a/docs/python-sdk/fastmcp-client-mixins-resources.mdx +++ b/docs/python-sdk/fastmcp-client-mixins-resources.mdx @@ -58,7 +58,7 @@ large result sets incrementally), use list_resources_mcp() with the cursor param - `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `list_resource_templates_mcp` +#### `list_resource_templates_mcp` ```python list_resource_templates_mcp(self: Client) -> mcp.types.ListResourceTemplatesResult @@ -78,7 +78,7 @@ containing the list of resource templates and any additional metadata. - `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `list_resource_templates` +#### `list_resource_templates` ```python list_resource_templates(self: Client) -> list[mcp.types.ResourceTemplate] @@ -99,7 +99,7 @@ cursor parameter. - `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `read_resource_mcp` +#### `read_resource_mcp` ```python read_resource_mcp(self: Client, uri: AnyUrl | str, meta: dict[str, Any] | None = None) -> mcp.types.ReadResourceResult @@ -120,19 +120,19 @@ containing the resource contents and any additional metadata. 
- `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `read_resource` +#### `read_resource` ```python read_resource(self: Client, uri: AnyUrl | str) -> list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents] ``` -#### `read_resource` +#### `read_resource` ```python read_resource(self: Client, uri: AnyUrl | str) -> ResourceTask ``` -#### `read_resource` +#### `read_resource` ```python read_resource(self: Client, uri: AnyUrl | str) -> list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents] | ResourceTask diff --git a/docs/python-sdk/fastmcp-client-mixins-tools.mdx b/docs/python-sdk/fastmcp-client-mixins-tools.mdx index 769b267c2f..eda72f44d0 100644 --- a/docs/python-sdk/fastmcp-client-mixins-tools.mdx +++ b/docs/python-sdk/fastmcp-client-mixins-tools.mdx @@ -58,7 +58,7 @@ large result sets incrementally), use list_tools_mcp() with the cursor parameter - `McpError`: If the request results in a TimeoutError | JSONRPCError -#### `call_tool_mcp` +#### `call_tool_mcp` ```python call_tool_mcp(self: Client, name: str, arguments: dict[str, Any], progress_handler: ProgressHandler | None = None, timeout: datetime.timedelta | float | int | None = None, meta: dict[str, Any] | None = None) -> mcp.types.CallToolResult @@ -88,19 +88,19 @@ containing the tool result and any additional metadata. 
- `McpError`: If the tool call requests results in a TimeoutError | JSONRPCError -#### `call_tool` +#### `call_tool` ```python call_tool(self: Client, name: str, arguments: dict[str, Any] | None = None) -> CallToolResult ``` -#### `call_tool` +#### `call_tool` ```python call_tool(self: Client, name: str, arguments: dict[str, Any] | None = None) -> ToolTask ``` -#### `call_tool` +#### `call_tool` ```python call_tool(self: Client, name: str, arguments: dict[str, Any] | None = None) -> CallToolResult | ToolTask diff --git a/docs/python-sdk/fastmcp-mcp_config.mdx b/docs/python-sdk/fastmcp-mcp_config.mdx index 87ed568a0e..6ecd62b70c 100644 --- a/docs/python-sdk/fastmcp-mcp_config.mdx +++ b/docs/python-sdk/fastmcp-mcp_config.mdx @@ -42,7 +42,7 @@ infer_transport_type_from_url(url: str | AnyUrl) -> Literal['http', 'sse'] Infer the appropriate transport type from the given URL. -### `update_config_file` +### `update_config_file` ```python update_config_file(file_path: Path, server_name: str, server_config: CanonicalMCPServerTypes) -> None @@ -57,7 +57,7 @@ worry about transforming server objects here. ## Classes -### `StdioMCPServer` +### `StdioMCPServer` MCP server configuration for stdio transport. @@ -67,19 +67,19 @@ This is the canonical configuration format for MCP servers using stdio transport **Methods:** -#### `to_transport` +#### `to_transport` ```python to_transport(self) -> StdioTransport ``` -### `TransformingStdioMCPServer` +### `TransformingStdioMCPServer` A Stdio server with tool transforms. -### `RemoteMCPServer` +### `RemoteMCPServer` MCP server configuration for HTTP/SSE transport. @@ -89,19 +89,19 @@ This is the canonical configuration format for MCP servers using remote transpor **Methods:** -#### `to_transport` +#### `to_transport` ```python to_transport(self) -> StreamableHttpTransport | SSETransport ``` -### `TransformingRemoteMCPServer` +### `TransformingRemoteMCPServer` A Remote server with tool transforms. 
-### `MCPConfig` +### `MCPConfig` A configuration object for MCP Servers that conforms to the canonical MCP configuration format @@ -113,7 +113,7 @@ For an MCPConfig that is strictly canonical, see the `CanonicalMCPConfig` class. **Methods:** -#### `wrap_servers_at_root` +#### `wrap_servers_at_root` ```python wrap_servers_at_root(cls, values: dict[str, Any]) -> dict[str, Any] @@ -122,7 +122,7 @@ wrap_servers_at_root(cls, values: dict[str, Any]) -> dict[str, Any] If there's no mcpServers key but there are server configs at root, wrap them. -#### `add_server` +#### `add_server` ```python add_server(self, name: str, server: MCPServerTypes) -> None @@ -131,7 +131,7 @@ add_server(self, name: str, server: MCPServerTypes) -> None Add or update a server in the configuration. -#### `from_dict` +#### `from_dict` ```python from_dict(cls, config: dict[str, Any]) -> Self @@ -140,7 +140,7 @@ from_dict(cls, config: dict[str, Any]) -> Self Parse MCP configuration from dictionary format. -#### `to_dict` +#### `to_dict` ```python to_dict(self) -> dict[str, Any] @@ -149,7 +149,7 @@ to_dict(self) -> dict[str, Any] Convert MCPConfig to dictionary format, preserving all fields. -#### `write_to_file` +#### `write_to_file` ```python write_to_file(self, file_path: Path) -> None @@ -158,7 +158,7 @@ write_to_file(self, file_path: Path) -> None Write configuration to JSON file. -#### `from_file` +#### `from_file` ```python from_file(cls, file_path: Path) -> Self @@ -167,7 +167,7 @@ from_file(cls, file_path: Path) -> Self Load configuration from JSON file. -### `CanonicalMCPConfig` +### `CanonicalMCPConfig` Canonical MCP configuration format. 
@@ -178,7 +178,7 @@ The format is designed to be client-agnostic and extensible for future use cases **Methods:** -#### `add_server` +#### `add_server` ```python add_server(self, name: str, server: CanonicalMCPServerTypes) -> None diff --git a/docs/python-sdk/fastmcp-resources-template.mdx b/docs/python-sdk/fastmcp-resources-template.mdx index e55bae102b..0baaedbd6f 100644 --- a/docs/python-sdk/fastmcp-resources-template.mdx +++ b/docs/python-sdk/fastmcp-resources-template.mdx @@ -62,7 +62,7 @@ A template for dynamically creating resources. #### `from_function` ```python -from_function(fn: Callable[..., Any], uri_template: str, name: str | None = None, version: str | int | None = None, title: str | None = None, description: str | None = None, icons: list[Icon] | None = None, mime_type: str | None = None, tags: set[str] | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, auth: AuthCheckCallable | list[AuthCheckCallable] | None = None) -> FunctionResourceTemplate +from_function(fn: Callable[..., Any], uri_template: str, name: str | None = None, version: str | int | None = None, title: str | None = None, description: str | None = None, icons: list[Icon] | None = None, mime_type: str | None = None, tags: set[str] | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, auth: AuthCheck | list[AuthCheck] | None = None) -> FunctionResourceTemplate ``` #### `set_default_mime_type` @@ -237,7 +237,7 @@ FunctionResourceTemplate splats the params dict since .fn expects **kwargs. 
#### `from_function` ```python -from_function(cls, fn: Callable[..., Any], uri_template: str, name: str | None = None, version: str | int | None = None, title: str | None = None, description: str | None = None, icons: list[Icon] | None = None, mime_type: str | None = None, tags: set[str] | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, auth: AuthCheckCallable | list[AuthCheckCallable] | None = None) -> FunctionResourceTemplate +from_function(cls, fn: Callable[..., Any], uri_template: str, name: str | None = None, version: str | int | None = None, title: str | None = None, description: str | None = None, icons: list[Icon] | None = None, mime_type: str | None = None, tags: set[str] | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, auth: AuthCheck | list[AuthCheck] | None = None) -> FunctionResourceTemplate ``` Create a template from a function. diff --git a/docs/python-sdk/fastmcp-server-apps.mdx b/docs/python-sdk/fastmcp-server-apps.mdx index c72052339f..ad006b4921 100644 --- a/docs/python-sdk/fastmcp-server-apps.mdx +++ b/docs/python-sdk/fastmcp-server-apps.mdx @@ -15,17 +15,17 @@ UI metadata for clients that support interactive app rendering. ## Functions -### `ui_to_meta_dict` +### `app_config_to_meta_dict` ```python -ui_to_meta_dict(ui: ToolUI | ResourceUI | dict[str, Any]) -> dict[str, Any] +app_config_to_meta_dict(app: AppConfig | dict[str, Any]) -> dict[str, Any] ``` -Convert a UI model or dict to the wire-format dict for ``meta["ui"]``. +Convert an AppConfig or dict to the wire-format dict for ``meta["ui"]``. -### `resolve_ui_mime_type` +### `resolve_ui_mime_type` ```python resolve_ui_mime_type(uri: str, explicit_mime_type: str | None) -> str | None @@ -70,18 +70,17 @@ iframe. Hosts MAY honour these; apps should use JS feature detection as a fallback. 
-### `ToolUI` +### `AppConfig` -Typed ``_meta.ui`` for tools β€” links a tool to its UI resource. +Configuration for MCP App tools and resources. + +Controls how a tool or resource participates in the MCP Apps extension. +On tools, ``resource_uri`` and ``visibility`` specify which UI resource +to render and where the tool appears. On resources, those fields must +be left unset (the resource itself is the UI). All fields use ``exclude_none`` serialization so only explicitly-set values appear on the wire. Aliases match the MCP Apps wire format (camelCase). - -### `ResourceUI` - - -Typed ``_meta.ui`` for resources β€” rendering hints for UI-capable clients. - diff --git a/docs/python-sdk/fastmcp-server-auth-authorization.mdx b/docs/python-sdk/fastmcp-server-auth-authorization.mdx index 6268118d8f..28663f6f89 100644 --- a/docs/python-sdk/fastmcp-server-auth-authorization.mdx +++ b/docs/python-sdk/fastmcp-server-auth-authorization.mdx @@ -36,7 +36,7 @@ Example: ## Functions -### `require_scopes` +### `require_scopes` ```python require_scopes(*scopes: str) -> AuthCheck @@ -52,7 +52,7 @@ in the token (AND logic). - `*scopes`: One or more scope strings that must all be present. -### `restrict_tag` +### `restrict_tag` ```python restrict_tag(tag: str) -> AuthCheck @@ -69,7 +69,7 @@ required scopes. If the component doesn't have the tag, access is allowed. - `scopes`: List of scopes required when the tag is present. -### `run_auth_checks` +### `run_auth_checks` ```python run_auth_checks(checks: AuthCheck | list[AuthCheck], ctx: AuthContext) -> bool @@ -78,7 +78,8 @@ run_auth_checks(checks: AuthCheck | list[AuthCheck], ctx: AuthContext) -> bool Run auth checks with AND logic. -All checks must pass for authorization to succeed. +All checks must pass for authorization to succeed. Checks can be +synchronous or asynchronous functions. 
Auth checks can: - Return True to allow access @@ -88,6 +89,7 @@ Auth checks can: **Args:** - `checks`: A single check function or list of check functions. +Each check can be sync (returns bool) or async (returns Awaitable[bool]). - `ctx`: The auth context to pass to each check. **Returns:** @@ -99,7 +101,7 @@ Auth checks can: ## Classes -### `AuthContext` +### `AuthContext` Context passed to auth check callables. @@ -115,7 +117,7 @@ access to the current authentication token and the component being accessed. **Methods:** -#### `tool` +#### `tool` ```python tool(self) -> Tool | None diff --git a/docs/python-sdk/fastmcp-server-auth-providers-azure.mdx b/docs/python-sdk/fastmcp-server-auth-providers-azure.mdx index 2e403a611e..e5773c6fdb 100644 --- a/docs/python-sdk/fastmcp-server-auth-providers-azure.mdx +++ b/docs/python-sdk/fastmcp-server-auth-providers-azure.mdx @@ -12,9 +12,38 @@ This provider implements Azure/Microsoft Entra ID OAuth authentication using the OAuth Proxy pattern for non-DCR OAuth flows. +## Functions + +### `EntraOBOToken` + +```python +EntraOBOToken(scopes: list[str]) -> str +``` + + +Exchange the user's Entra token for a downstream API token via OBO. + +This dependency performs a Microsoft Entra On-Behalf-Of (OBO) token exchange, +allowing your MCP server to call downstream APIs (like Microsoft Graph) on +behalf of the authenticated user. + +**Args:** +- `scopes`: The scopes to request for the downstream API. For Microsoft Graph, +use scopes like ["https\://graph.microsoft.com/Mail.Read"] or +["https\://graph.microsoft.com/.default"]. + +**Returns:** +- A dependency that resolves to the downstream API access token string + +**Raises:** +- `ImportError`: If fastmcp[azure] is not installed +- `RuntimeError`: If no access token is available, provider is not Azure, +or OBO exchange fails + + ## Classes -### `AzureProvider` +### `AzureProvider` Azure (Microsoft Entra) OAuth provider for FastMCP. 
@@ -49,7 +78,7 @@ Setup: **Methods:** -#### `authorize` +#### `authorize` ```python authorize(self, client: OAuthClientInformationFull, params: AuthorizationParams) -> str @@ -69,7 +98,29 @@ scopes to determine the resource/audience instead of a separate parameter. - Authorization URL to redirect the user to Azure AD -### `AzureJWTVerifier` +#### `create_obo_credential` + +```python +create_obo_credential(self, user_assertion: str) -> OnBehalfOfCredential +``` + +Create an OnBehalfOfCredential for OBO token exchange. + +Uses the AzureProvider's configuration (client_id, client_secret, +tenant_id, authority) to create a credential that can exchange the +user's token for downstream API tokens. + +**Args:** +- `user_assertion`: The user's access token to exchange via OBO. + +**Returns:** +- A configured OnBehalfOfCredential ready for get_token() calls. + +**Raises:** +- `ImportError`: If azure-identity is not installed (requires fastmcp[azure]). + + +### `AzureJWTVerifier` JWT verifier pre-configured for Azure AD / Microsoft Entra ID. @@ -106,7 +157,7 @@ Example:: **Methods:** -#### `scopes_supported` +#### `scopes_supported` ```python scopes_supported(self) -> list[str] diff --git a/docs/python-sdk/fastmcp-server-context.mdx b/docs/python-sdk/fastmcp-server-context.mdx index 060f0c4d5e..5c266ed117 100644 --- a/docs/python-sdk/fastmcp-server-context.mdx +++ b/docs/python-sdk/fastmcp-server-context.mdx @@ -77,6 +77,9 @@ async def my_tool(x: int, ctx: Context) -> str: await ctx.set_state("key", "value") value = await ctx.get_state("key") + # Store non-serializable values for the current request only + await ctx.set_state("client", http_client, serializable=False) + return str(x) ``` @@ -96,7 +99,7 @@ The context is optional - tools that don't need it can omit the parameter. 
**Methods:** -#### `is_background_task` +#### `is_background_task` ```python is_background_task(self) -> bool @@ -109,7 +112,7 @@ task-aware implementations that can pause the task and wait for client input. -#### `task_id` +#### `task_id` ```python task_id(self) -> str | None @@ -120,7 +123,7 @@ Get the background task ID if running in a background task. Returns None if not running in a background task context. -#### `fastmcp` +#### `fastmcp` ```python fastmcp(self) -> FastMCP @@ -129,7 +132,7 @@ fastmcp(self) -> FastMCP Get the FastMCP instance. -#### `request_context` +#### `request_context` ```python request_context(self) -> RequestContext[ServerSession, Any, Request] | None @@ -158,7 +161,7 @@ async def on_request(self, context, call_next): ``` -#### `lifespan_context` +#### `lifespan_context` ```python lifespan_context(self) -> dict[str, Any] @@ -170,6 +173,10 @@ Returns the context dict yielded by the server's lifespan function. Returns an empty dict if no lifespan was configured or if the MCP session is not yet established. +In background tasks (Docket workers), where request_context is not +available, falls back to reading from the FastMCP server's lifespan +result directly. + Example: ```python @server.tool @@ -181,7 +188,7 @@ def my_tool(ctx: Context) -> str: ``` -#### `report_progress` +#### `report_progress` ```python report_progress(self, progress: float, total: float | None = None, message: str | None = None) -> None @@ -189,12 +196,16 @@ report_progress(self, progress: float, total: float | None = None, message: str Report progress for the current operation. +Works in both foreground (MCP progress notifications) and background +(Docket task execution) contexts. + **Args:** - `progress`: Current progress value e.g. 24 - `total`: Optional total value e.g. 
100 +- `message`: Optional status message describing current progress -#### `list_resources` +#### `list_resources` ```python list_resources(self) -> list[SDKResource] @@ -206,7 +217,7 @@ List all available resources from the server. - List of Resource objects available on the server -#### `list_prompts` +#### `list_prompts` ```python list_prompts(self) -> list[SDKPrompt] @@ -218,7 +229,7 @@ List all available prompts from the server. - List of Prompt objects available on the server -#### `get_prompt` +#### `get_prompt` ```python get_prompt(self, name: str, arguments: dict[str, Any] | None = None) -> GetPromptResult @@ -234,7 +245,7 @@ Get a prompt by name with optional arguments. - The prompt result -#### `read_resource` +#### `read_resource` ```python read_resource(self, uri: str | AnyUrl) -> ResourceResult @@ -249,7 +260,7 @@ Read a resource by URI. - ResourceResult with contents -#### `log` +#### `log` ```python log(self, message: str, level: LoggingLevel | None = None, logger_name: str | None = None, extra: Mapping[str, Any] | None = None) -> None @@ -267,7 +278,7 @@ Messages sent to Clients are also logged to the `fastmcp.server.context.to_clien - `extra`: Optional mapping for additional arguments -#### `transport` +#### `transport` ```python transport(self) -> TransportType | None @@ -279,7 +290,7 @@ Returns the transport type used to run this server: "stdio", "sse", or "streamable-http". Returns None if called outside of a server context. -#### `client_supports_extension` +#### `client_supports_extension` ```python client_supports_extension(self, extension_id: str) -> bool @@ -304,7 +315,7 @@ Example:: return "text-only client" -#### `client_id` +#### `client_id` ```python client_id(self) -> str | None @@ -313,7 +324,7 @@ client_id(self) -> str | None Get the client ID if available. -#### `request_id` +#### `request_id` ```python request_id(self) -> str @@ -324,7 +335,7 @@ Get the unique ID for this request. 
Raises RuntimeError if MCP request context is not available. -#### `session_id` +#### `session_id` ```python session_id(self) -> str @@ -341,7 +352,7 @@ the same client session. - for other transports. -#### `session` +#### `session` ```python session(self) -> ServerSession @@ -355,7 +366,7 @@ In background task mode: Returns the session stored at Context creation. Raises RuntimeError if no session is available. -#### `debug` +#### `debug` ```python debug(self, message: str, logger_name: str | None = None, extra: Mapping[str, Any] | None = None) -> None @@ -366,7 +377,7 @@ Send a `DEBUG`-level message to the connected MCP Client. Messages sent to Clients are also logged to the `fastmcp.server.context.to_client` logger with a level of `DEBUG`. -#### `info` +#### `info` ```python info(self, message: str, logger_name: str | None = None, extra: Mapping[str, Any] | None = None) -> None @@ -377,7 +388,7 @@ Send a `INFO`-level message to the connected MCP Client. Messages sent to Clients are also logged to the `fastmcp.server.context.to_client` logger with a level of `DEBUG`. -#### `warning` +#### `warning` ```python warning(self, message: str, logger_name: str | None = None, extra: Mapping[str, Any] | None = None) -> None @@ -388,7 +399,7 @@ Send a `WARNING`-level message to the connected MCP Client. Messages sent to Clients are also logged to the `fastmcp.server.context.to_client` logger with a level of `DEBUG`. -#### `error` +#### `error` ```python error(self, message: str, logger_name: str | None = None, extra: Mapping[str, Any] | None = None) -> None @@ -399,7 +410,7 @@ Send a `ERROR`-level message to the connected MCP Client. Messages sent to Clients are also logged to the `fastmcp.server.context.to_client` logger with a level of `DEBUG`. -#### `list_roots` +#### `list_roots` ```python list_roots(self) -> list[Root] @@ -408,7 +419,7 @@ list_roots(self) -> list[Root] List the roots available to the server, as indicated by the client. 
-#### `send_notification` +#### `send_notification` ```python send_notification(self, notification: mcp.types.ServerNotificationType) -> None @@ -420,7 +431,7 @@ Send a notification to the client immediately. - `notification`: An MCP notification instance (e.g., ToolListChangedNotification()) -#### `close_sse_stream` +#### `close_sse_stream` ```python close_sse_stream(self) -> None @@ -438,7 +449,7 @@ Instead of holding a connection open for minutes, you can periodically close and let the client reconnect. -#### `sample_step` +#### `sample_step` ```python sample_step(self, messages: str | Sequence[str | SamplingMessage]) -> SampleStep @@ -465,6 +476,12 @@ in the step for manual execution. - `mask_error_details`: If True, mask detailed error messages from tool execution. When None (default), uses the global settings value. Tools can raise ToolError to bypass masking. +- `tool_concurrency`: Controls parallel execution of tools\: +- None (default)\: Sequential execution (one at a time) +- 0\: Unlimited parallel execution +- N > 0\: Execute at most N tools concurrently +If any tool has sequential=True, all tools execute sequentially +regardless of this setting. **Returns:** - SampleStep containing: @@ -475,7 +492,7 @@ Tools can raise ToolError to bypass masking. - - .text: The text content (if any) -#### `sample` +#### `sample` ```python sample(self, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[ResultT] @@ -484,7 +501,7 @@ sample(self, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[ Overload: With result_type, returns SamplingResult[ResultT]. -#### `sample` +#### `sample` ```python sample(self, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[str] @@ -493,7 +510,7 @@ sample(self, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[ Overload: Without result_type, returns SamplingResult[str]. 
-#### `sample` +#### `sample` ```python sample(self, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[ResultT] | SamplingResult[str] @@ -527,6 +544,12 @@ response is validated against this type. - `mask_error_details`: If True, mask detailed error messages from tool execution. When None (default), uses the global settings value. Tools can raise ToolError to bypass masking. +- `tool_concurrency`: Controls parallel execution of tools\: +- None (default)\: Sequential execution (one at a time) +- 0\: Unlimited parallel execution +- N > 0\: Execute at most N tools concurrently +If any tool has sequential=True, all tools execute sequentially +regardless of this setting. **Returns:** - SamplingResult[T] containing: @@ -535,43 +558,43 @@ Tools can raise ToolError to bypass masking. - - .history: All messages exchanged during sampling -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: None) -> AcceptedElicitation[dict[str, Any]] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: type[T]) -> AcceptedElicitation[T] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: list[str]) -> AcceptedElicitation[str] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: dict[str, dict[str, str]]) -> AcceptedElicitation[str] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: list[list[str]]) -> AcceptedElicitation[list[str]] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: str, response_type: list[dict[str, dict[str, str]]]) -> AcceptedElicitation[list[str]] | DeclinedElicitation | CancelledElicitation ``` -#### `elicit` +#### `elicit` ```python elicit(self, message: 
str, response_type: type[T] | list[str] | dict[str, dict[str, str]] | list[list[str]] | list[dict[str, dict[str, str]]] | None = None) -> AcceptedElicitation[T] | AcceptedElicitation[dict[str, Any]] | AcceptedElicitation[str] | AcceptedElicitation[list[str]] | DeclinedElicitation | CancelledElicitation @@ -600,40 +623,53 @@ type or dataclass or BaseModel. If it is a primitive type, an object schema with a single "value" field will be generated. -#### `set_state` +#### `set_state` ```python set_state(self, key: str, value: Any) -> None ``` -Set a value in the session-scoped state store. +Set a value in the state store. + +By default, values are stored in the session-scoped state store and +persist across requests within the same MCP session. Values must be +JSON-serializable (dicts, lists, strings, numbers, etc.). + +For non-serializable values (e.g., HTTP clients, database connections), +pass ``serializable=False``. These values are stored in a request-scoped +dict and only live for the current MCP request (tool call, resource +read, or prompt render). They will not be available in subsequent +requests. -Values persist across requests within the same MCP session. The key is automatically prefixed with the session identifier. -State expires after 1 day to prevent unbounded memory growth. -#### `get_state` +#### `get_state` ```python get_state(self, key: str) -> Any ``` -Get a value from the session-scoped state store. +Get a value from the state store. + +Checks request-scoped state first (set with ``serializable=False``), +then falls back to the session-scoped state store. Returns None if the key is not found. -#### `delete_state` +#### `delete_state` ```python delete_state(self, key: str) -> None ``` -Delete a value from the session-scoped state store. +Delete a value from the state store. + +Removes from both request-scoped and session-scoped stores. 
-#### `enable_components` +#### `enable_components` ```python enable_components(self) -> None @@ -657,7 +693,7 @@ ResourceListChangedNotification, and PromptListChangedNotification. - `match_all`: If True, matches all components regardless of other criteria. -#### `disable_components` +#### `disable_components` ```python disable_components(self) -> None @@ -681,7 +717,7 @@ ResourceListChangedNotification, and PromptListChangedNotification. - `match_all`: If True, matches all components regardless of other criteria. -#### `reset_visibility` +#### `reset_visibility` ```python reset_visibility(self) -> None diff --git a/docs/python-sdk/fastmcp-server-dependencies.mdx b/docs/python-sdk/fastmcp-server-dependencies.mdx index b066c8c8ea..5660ac13af 100644 --- a/docs/python-sdk/fastmcp-server-dependencies.mdx +++ b/docs/python-sdk/fastmcp-server-dependencies.mdx @@ -15,7 +15,7 @@ CurrentWorker) and background task execution require fastmcp[tasks]. ## Functions -### `get_task_context` +### `get_task_context` ```python get_task_context() -> TaskContextInfo | None @@ -31,7 +31,7 @@ Returns None if not running in a task context (e.g., foreground execution). - TaskContextInfo with task_id and session_id, or None if not in a task. -### `register_task_session` +### `register_task_session` ```python register_task_session(session_id: str, session: ServerSession) -> None @@ -49,7 +49,7 @@ client disconnects. - `session`: The ServerSession instance -### `get_task_session` +### `get_task_session` ```python get_task_session(session_id: str) -> ServerSession | None @@ -65,7 +65,7 @@ Get a registered session by ID if still alive. - The ServerSession if found and alive, None otherwise -### `is_docket_available` +### `is_docket_available` ```python is_docket_available() -> bool @@ -75,7 +75,7 @@ is_docket_available() -> bool Check if pydocket is installed. 
-### `require_docket` +### `require_docket` ```python require_docket(feature: str) -> None @@ -89,7 +89,7 @@ Raise ImportError with install instructions if docket not available. "CurrentDocket()"). Will be included in the error message. -### `transform_context_annotations` +### `transform_context_annotations` ```python transform_context_annotations(fn: Callable[..., Any]) -> Callable[..., Any] @@ -115,7 +115,7 @@ allows them to have defaults in any order. - Function with modified signature (same function object, updated __signature__) -### `get_context` +### `get_context` ```python get_context() -> Context @@ -125,7 +125,7 @@ get_context() -> Context Get the current FastMCP Context instance directly. -### `get_server` +### `get_server` ```python get_server() -> FastMCP @@ -141,7 +141,7 @@ Get the current FastMCP server instance directly. - `RuntimeError`: If no server in context -### `get_http_request` +### `get_http_request` ```python get_http_request() -> Request @@ -153,7 +153,7 @@ Get the current HTTP request. Tries MCP SDK's request_ctx first, then falls back to FastMCP's HTTP context. -### `get_http_headers` +### `get_http_headers` ```python get_http_headers(include_all: bool = False) -> dict[str, str] @@ -169,7 +169,7 @@ By default, strips problematic headers like `content-length` that cause issues if forwarded to downstream clients. If `include_all` is True, all headers are returned. -### `get_access_token` +### `get_access_token` ```python get_access_token() -> AccessToken | None @@ -181,13 +181,14 @@ Get the FastMCP access token from the current context. This function first tries to get the token from the current HTTP request's scope, which is more reliable for long-lived connections where the SDK's auth_context_var may become stale after token refresh. Falls back to the SDK's context var if no -request is available. +request is available. In background tasks (Docket workers), falls back to the +token snapshot stored in Redis at task submission time. 
**Returns:** - The access token if an authenticated user is available, None otherwise. -### `without_injected_parameters` +### `without_injected_parameters` ```python without_injected_parameters(fn: Callable[..., Any]) -> Callable[..., Any] @@ -212,7 +213,7 @@ Handles: - Async wrapper function without injected parameters -### `resolve_dependencies` +### `resolve_dependencies` ```python resolve_dependencies(fn: Callable[..., Any], arguments: dict[str, Any]) -> AsyncGenerator[dict[str, Any], None] @@ -238,7 +239,7 @@ time, so all injection goes through the unified DI system. which will be filtered out) -### `CurrentContext` +### `CurrentContext` ```python CurrentContext() -> Context @@ -257,7 +258,7 @@ current MCP operation (tool/resource/prompt call). - `RuntimeError`: If no active context found (during resolution) -### `CurrentDocket` +### `CurrentDocket` ```python CurrentDocket() -> Docket @@ -277,7 +278,7 @@ automatically creates for background task scheduling. - `ImportError`: If fastmcp[tasks] not installed -### `CurrentWorker` +### `CurrentWorker` ```python CurrentWorker() -> Worker @@ -297,7 +298,7 @@ automatically creates for background task processing. - `ImportError`: If fastmcp[tasks] not installed -### `CurrentFastMCP` +### `CurrentFastMCP` ```python CurrentFastMCP() -> FastMCP @@ -315,7 +316,7 @@ This dependency provides access to the active FastMCP server. - `RuntimeError`: If no server in context (during resolution) -### `CurrentRequest` +### `CurrentRequest` ```python CurrentRequest() -> Request @@ -335,7 +336,7 @@ current HTTP request. Only available when running over HTTP transports - `RuntimeError`: If no HTTP request in context (e.g., STDIO transport) -### `CurrentHeaders` +### `CurrentHeaders` ```python CurrentHeaders() -> dict[str, str] @@ -352,7 +353,7 @@ safe to use in code that might run over any transport. 
- A dependency that resolves to a dictionary of header name -> value -### `CurrentAccessToken` +### `CurrentAccessToken` ```python CurrentAccessToken() -> AccessToken @@ -371,9 +372,32 @@ authenticated request. Raises an error if no authentication is present. - `RuntimeError`: If no authenticated user (use get_access_token() for optional) +### `TokenClaim` + +```python +TokenClaim(name: str) -> str +``` + + +Get a specific claim from the access token. + +This dependency extracts a single claim value from the current access token. +It's useful for getting user identifiers, roles, or other token claims +without needing the full token object. + +**Args:** +- `name`: The name of the claim to extract (e.g., "oid", "sub", "email") + +**Returns:** +- A dependency that resolves to the claim value as a string + +**Raises:** +- `RuntimeError`: If no access token is available or claim is missing + + ## Classes -### `TaskContextInfo` +### `TaskContextInfo` Information about the current background task context. @@ -382,7 +406,7 @@ Returned by ``get_task_context()`` when running inside a Docket worker. Contains identifiers needed to communicate with the MCP session. -### `ProgressLike` +### `ProgressLike` Protocol for progress tracking interface. @@ -393,7 +417,7 @@ and Docket's Progress (worker context). **Methods:** -#### `current` +#### `current` ```python current(self) -> int | None @@ -402,7 +426,7 @@ current(self) -> int | None Current progress value. -#### `total` +#### `total` ```python total(self) -> int @@ -411,7 +435,7 @@ total(self) -> int Total/target progress value. -#### `message` +#### `message` ```python message(self) -> str | None @@ -420,7 +444,7 @@ message(self) -> str | None Current progress message. -#### `set_total` +#### `set_total` ```python set_total(self, total: int) -> None @@ -429,7 +453,7 @@ set_total(self, total: int) -> None Set the total/target value for progress tracking. 
-#### `increment` +#### `increment` ```python increment(self, amount: int = 1) -> None @@ -438,7 +462,7 @@ increment(self, amount: int = 1) -> None Atomically increment the current progress value. -#### `set_message` +#### `set_message` ```python set_message(self, message: str | None) -> None @@ -447,7 +471,7 @@ set_message(self, message: str | None) -> None Update the progress status message. -### `InMemoryProgress` +### `InMemoryProgress` In-memory progress tracker for immediate tool execution. @@ -459,25 +483,25 @@ progress doesn't need to be observable across processes. **Methods:** -#### `current` +#### `current` ```python current(self) -> int | None ``` -#### `total` +#### `total` ```python total(self) -> int ``` -#### `message` +#### `message` ```python message(self) -> str | None ``` -#### `set_total` +#### `set_total` ```python set_total(self, total: int) -> None @@ -486,7 +510,7 @@ set_total(self, total: int) -> None Set the total/target value for progress tracking. -#### `increment` +#### `increment` ```python increment(self, amount: int = 1) -> None @@ -495,7 +519,7 @@ increment(self, amount: int = 1) -> None Atomically increment the current progress value. -#### `set_message` +#### `set_message` ```python set_message(self, message: str | None) -> None @@ -504,7 +528,7 @@ set_message(self, message: str | None) -> None Update the progress status message. -### `Progress` +### `Progress` FastMCP Progress dependency that works in both server and worker contexts. 
diff --git a/docs/python-sdk/fastmcp-server-middleware-caching.mdx b/docs/python-sdk/fastmcp-server-middleware-caching.mdx index 8a997b6dfe..8b3f691a19 100644 --- a/docs/python-sdk/fastmcp-server-middleware-caching.mdx +++ b/docs/python-sdk/fastmcp-server-middleware-caching.mdx @@ -151,7 +151,7 @@ Notes: **Methods:** -#### `on_list_tools` +#### `on_list_tools` ```python on_list_tools(self, context: MiddlewareContext[mcp.types.ListToolsRequest], call_next: CallNext[mcp.types.ListToolsRequest, Sequence[Tool]]) -> Sequence[Tool] @@ -161,7 +161,7 @@ List tools from the cache, if caching is enabled, and the result is in the cache otherwise call the next middleware and store the result in the cache if caching is enabled. -#### `on_list_resources` +#### `on_list_resources` ```python on_list_resources(self, context: MiddlewareContext[mcp.types.ListResourcesRequest], call_next: CallNext[mcp.types.ListResourcesRequest, Sequence[Resource]]) -> Sequence[Resource] @@ -171,7 +171,7 @@ List resources from the cache, if caching is enabled, and the result is in the c otherwise call the next middleware and store the result in the cache if caching is enabled. -#### `on_list_prompts` +#### `on_list_prompts` ```python on_list_prompts(self, context: MiddlewareContext[mcp.types.ListPromptsRequest], call_next: CallNext[mcp.types.ListPromptsRequest, Sequence[Prompt]]) -> Sequence[Prompt] @@ -181,7 +181,7 @@ List prompts from the cache, if caching is enabled, and the result is in the cac otherwise call the next middleware and store the result in the cache if caching is enabled. -#### `on_call_tool` +#### `on_call_tool` ```python on_call_tool(self, context: MiddlewareContext[mcp.types.CallToolRequestParams], call_next: CallNext[mcp.types.CallToolRequestParams, ToolResult]) -> ToolResult @@ -191,7 +191,7 @@ Call a tool from the cache, if caching is enabled, and the result is in the cach otherwise call the next middleware and store the result in the cache if caching is enabled. 
-#### `on_read_resource` +#### `on_read_resource` ```python on_read_resource(self, context: MiddlewareContext[mcp.types.ReadResourceRequestParams], call_next: CallNext[mcp.types.ReadResourceRequestParams, ResourceResult]) -> ResourceResult @@ -201,7 +201,7 @@ Read a resource from the cache, if caching is enabled, and the result is in the otherwise call the next middleware and store the result in the cache if caching is enabled. -#### `on_get_prompt` +#### `on_get_prompt` ```python on_get_prompt(self, context: MiddlewareContext[mcp.types.GetPromptRequestParams], call_next: CallNext[mcp.types.GetPromptRequestParams, PromptResult]) -> PromptResult @@ -211,7 +211,7 @@ Get a prompt from the cache, if caching is enabled, and the result is in the cac otherwise call the next middleware and store the result in the cache if caching is enabled. -#### `statistics` +#### `statistics` ```python statistics(self) -> ResponseCachingStatistics diff --git a/docs/python-sdk/fastmcp-server-middleware-dereference.mdx b/docs/python-sdk/fastmcp-server-middleware-dereference.mdx new file mode 100644 index 0000000000..702c7a1d73 --- /dev/null +++ b/docs/python-sdk/fastmcp-server-middleware-dereference.mdx @@ -0,0 +1,35 @@ +--- +title: dereference +sidebarTitle: dereference +--- + +# `fastmcp.server.middleware.dereference` + + +Middleware that dereferences $ref in JSON schemas before sending to clients. + +## Classes + +### `DereferenceRefsMiddleware` + + +Dereferences $ref in component schemas before sending to clients. + +Some MCP clients (e.g., VS Code Copilot) don't handle JSON Schema $ref +properly. This middleware inlines all $ref definitions so schemas are +self-contained. Enabled by default via ``FastMCP(dereference_schemas=True)``. 
+ + +**Methods:** + +#### `on_list_tools` + +```python +on_list_tools(self, context: MiddlewareContext[mt.ListToolsRequest], call_next: CallNext[mt.ListToolsRequest, Sequence[Tool]]) -> Sequence[Tool] +``` + +#### `on_list_resource_templates` + +```python +on_list_resource_templates(self, context: MiddlewareContext[mt.ListResourceTemplatesRequest], call_next: CallNext[mt.ListResourceTemplatesRequest, Sequence[ResourceTemplate]]) -> Sequence[ResourceTemplate] +``` diff --git a/docs/python-sdk/fastmcp-server-mixins-transport.mdx b/docs/python-sdk/fastmcp-server-mixins-transport.mdx index ed61e5a5d2..0ce07f2fb9 100644 --- a/docs/python-sdk/fastmcp-server-mixins-transport.mdx +++ b/docs/python-sdk/fastmcp-server-mixins-transport.mdx @@ -104,7 +104,7 @@ Run the server using HTTP transport. - `stateless`: Alias for stateless_http for CLI consistency -#### `http_app` +#### `http_app` ```python http_app(self: FastMCP, path: str | None = None, middleware: list[ASGIMiddleware] | None = None, json_response: bool | None = None, stateless_http: bool | None = None, transport: Literal['http', 'streamable-http', 'sse'] = 'http', event_store: EventStore | None = None, retry_interval: int | None = None) -> StarletteWithLifespan diff --git a/docs/python-sdk/fastmcp-server-providers-local_provider-decorators-tools.mdx b/docs/python-sdk/fastmcp-server-providers-local_provider-decorators-tools.mdx index fd501251e5..44558f5608 100644 --- a/docs/python-sdk/fastmcp-server-providers-local_provider-decorators-tools.mdx +++ b/docs/python-sdk/fastmcp-server-providers-local_provider-decorators-tools.mdx @@ -14,7 +14,7 @@ registration functionality to LocalProvider. ## Classes -### `ToolDecoratorMixin` +### `ToolDecoratorMixin` Mixin class providing tool decorator functionality for LocalProvider. 
@@ -26,7 +26,7 @@ This mixin contains all methods related to: **Methods:** -#### `add_tool` +#### `add_tool` ```python add_tool(self: LocalProvider, tool: Tool | Callable[..., Any]) -> Tool @@ -37,19 +37,19 @@ Add a tool to this provider's storage. Accepts either a Tool object or a decorated function with __fastmcp__ metadata. -#### `tool` +#### `tool` ```python tool(self: LocalProvider, name_or_fn: AnyFunction) -> FunctionTool ``` -#### `tool` +#### `tool` ```python tool(self: LocalProvider, name_or_fn: str | None = None) -> Callable[[AnyFunction], FunctionTool] ``` -#### `tool` +#### `tool` ```python tool(self: LocalProvider, name_or_fn: str | AnyFunction | None = None) -> Callable[[AnyFunction], FunctionTool] | FunctionTool | partial[Callable[[AnyFunction], FunctionTool] | FunctionTool] diff --git a/docs/python-sdk/fastmcp-server-providers-openapi-components.mdx b/docs/python-sdk/fastmcp-server-providers-openapi-components.mdx index bc0b40d3cd..568d3d8d34 100644 --- a/docs/python-sdk/fastmcp-server-providers-openapi-components.mdx +++ b/docs/python-sdk/fastmcp-server-providers-openapi-components.mdx @@ -27,7 +27,7 @@ run(self, arguments: dict[str, Any]) -> ToolResult Execute the HTTP request using RequestDirector. -### `OpenAPIResource` +### `OpenAPIResource` Resource implementation for OpenAPI endpoints. @@ -35,7 +35,7 @@ Resource implementation for OpenAPI endpoints. **Methods:** -#### `read` +#### `read` ```python read(self) -> ResourceResult @@ -44,7 +44,7 @@ read(self) -> ResourceResult Fetch the resource data by making an HTTP request. -### `OpenAPIResourceTemplate` +### `OpenAPIResourceTemplate` Resource template implementation for OpenAPI endpoints. @@ -52,7 +52,7 @@ Resource template implementation for OpenAPI endpoints. 
**Methods:** -#### `create_resource` +#### `create_resource` ```python create_resource(self, uri: str, params: dict[str, Any], context: Context | None = None) -> Resource diff --git a/docs/python-sdk/fastmcp-server-providers-openapi-provider.mdx b/docs/python-sdk/fastmcp-server-providers-openapi-provider.mdx index 6892d11746..ba3d7918bf 100644 --- a/docs/python-sdk/fastmcp-server-providers-openapi-provider.mdx +++ b/docs/python-sdk/fastmcp-server-providers-openapi-provider.mdx @@ -10,7 +10,7 @@ OpenAPIProvider for creating MCP components from OpenAPI specifications. ## Classes -### `OpenAPIProvider` +### `OpenAPIProvider` Provider that creates MCP components from an OpenAPI specification. @@ -21,7 +21,7 @@ spec. Each component makes HTTP calls to the described API endpoints. **Methods:** -#### `lifespan` +#### `lifespan` ```python lifespan(self) -> AsyncIterator[None] @@ -30,7 +30,7 @@ lifespan(self) -> AsyncIterator[None] Manage the lifecycle of the auto-created httpx client. -#### `get_tasks` +#### `get_tasks` ```python get_tasks(self) -> Sequence[FastMCPComponent] diff --git a/docs/python-sdk/fastmcp-server-providers-proxy.mdx b/docs/python-sdk/fastmcp-server-providers-proxy.mdx index 803adb9735..50c105bcf2 100644 --- a/docs/python-sdk/fastmcp-server-providers-proxy.mdx +++ b/docs/python-sdk/fastmcp-server-providers-proxy.mdx @@ -15,7 +15,7 @@ classes that forward execution to remote servers. ## Functions -### `default_proxy_roots_handler` +### `default_proxy_roots_handler` ```python default_proxy_roots_handler(context: RequestContext[ClientSession, LifespanContextT]) -> RootsList @@ -25,7 +25,7 @@ default_proxy_roots_handler(context: RequestContext[ClientSession, LifespanConte Forward list roots request from remote server to proxy's connected clients. 
-### `default_proxy_sampling_handler` +### `default_proxy_sampling_handler` ```python default_proxy_sampling_handler(messages: list[mcp.types.SamplingMessage], params: mcp.types.CreateMessageRequestParams, context: RequestContext[ClientSession, LifespanContextT]) -> mcp.types.CreateMessageResult @@ -35,7 +35,7 @@ default_proxy_sampling_handler(messages: list[mcp.types.SamplingMessage], params Forward sampling request from remote server to proxy's connected clients. -### `default_proxy_elicitation_handler` +### `default_proxy_elicitation_handler` ```python default_proxy_elicitation_handler(message: str, response_type: type, params: mcp.types.ElicitRequestParams, context: RequestContext[ClientSession, LifespanContextT]) -> ElicitResult @@ -45,7 +45,7 @@ default_proxy_elicitation_handler(message: str, response_type: type, params: mcp Forward elicitation request from remote server to proxy's connected clients. -### `default_proxy_log_handler` +### `default_proxy_log_handler` ```python default_proxy_log_handler(message: LogMessage) -> None @@ -55,7 +55,7 @@ default_proxy_log_handler(message: LogMessage) -> None Forward log notification from remote server to proxy's connected clients. -### `default_proxy_progress_handler` +### `default_proxy_progress_handler` ```python default_proxy_progress_handler(progress: float, total: float | None, message: str | None) -> None @@ -67,7 +67,7 @@ Forward progress notification from remote server to proxy's connected clients. ## Classes -### `ProxyTool` +### `ProxyTool` A Tool that represents and executes a tool on a remote server. @@ -75,7 +75,7 @@ A Tool that represents and executes a tool on a remote server. **Methods:** -#### `model_copy` +#### `model_copy` ```python model_copy(self, **kwargs: Any) -> ProxyTool @@ -84,7 +84,7 @@ model_copy(self, **kwargs: Any) -> ProxyTool Override to preserve _backend_name when name changes. 
-#### `from_mcp_tool` +#### `from_mcp_tool` ```python from_mcp_tool(cls, client_factory: ClientFactoryT, mcp_tool: mcp.types.Tool) -> ProxyTool @@ -93,7 +93,7 @@ from_mcp_tool(cls, client_factory: ClientFactoryT, mcp_tool: mcp.types.Tool) -> Factory method to create a ProxyTool from a raw MCP tool schema. -#### `run` +#### `run` ```python run(self, arguments: dict[str, Any], context: Context | None = None) -> ToolResult @@ -102,13 +102,13 @@ run(self, arguments: dict[str, Any], context: Context | None = None) -> ToolResu Executes the tool by making a call through the client. -#### `get_span_attributes` +#### `get_span_attributes` ```python get_span_attributes(self) -> dict[str, Any] ``` -### `ProxyResource` +### `ProxyResource` A Resource that represents and reads a resource from a remote server. @@ -116,7 +116,7 @@ A Resource that represents and reads a resource from a remote server. **Methods:** -#### `model_copy` +#### `model_copy` ```python model_copy(self, **kwargs: Any) -> ProxyResource @@ -125,7 +125,7 @@ model_copy(self, **kwargs: Any) -> ProxyResource Override to preserve _backend_uri when uri changes. -#### `from_mcp_resource` +#### `from_mcp_resource` ```python from_mcp_resource(cls, client_factory: ClientFactoryT, mcp_resource: mcp.types.Resource) -> ProxyResource @@ -134,7 +134,7 @@ from_mcp_resource(cls, client_factory: ClientFactoryT, mcp_resource: mcp.types.R Factory method to create a ProxyResource from a raw MCP resource schema. -#### `read` +#### `read` ```python read(self) -> ResourceResult @@ -143,13 +143,13 @@ read(self) -> ResourceResult Read the resource content from the remote server. -#### `get_span_attributes` +#### `get_span_attributes` ```python get_span_attributes(self) -> dict[str, Any] ``` -### `ProxyTemplate` +### `ProxyTemplate` A ResourceTemplate that represents and creates resources from a remote server template. 
@@ -157,7 +157,7 @@ A ResourceTemplate that represents and creates resources from a remote server te **Methods:** -#### `model_copy` +#### `model_copy` ```python model_copy(self, **kwargs: Any) -> ProxyTemplate @@ -166,7 +166,7 @@ model_copy(self, **kwargs: Any) -> ProxyTemplate Override to preserve _backend_uri_template when uri_template changes. -#### `from_mcp_template` +#### `from_mcp_template` ```python from_mcp_template(cls, client_factory: ClientFactoryT, mcp_template: mcp.types.ResourceTemplate) -> ProxyTemplate @@ -175,7 +175,7 @@ from_mcp_template(cls, client_factory: ClientFactoryT, mcp_template: mcp.types.R Factory method to create a ProxyTemplate from a raw MCP template schema. -#### `create_resource` +#### `create_resource` ```python create_resource(self, uri: str, params: dict[str, Any], context: Context | None = None) -> ProxyResource @@ -184,13 +184,13 @@ create_resource(self, uri: str, params: dict[str, Any], context: Context | None Create a resource from the template by calling the remote server. -#### `get_span_attributes` +#### `get_span_attributes` ```python get_span_attributes(self) -> dict[str, Any] ``` -### `ProxyPrompt` +### `ProxyPrompt` A Prompt that represents and renders a prompt from a remote server. @@ -198,7 +198,7 @@ A Prompt that represents and renders a prompt from a remote server. **Methods:** -#### `model_copy` +#### `model_copy` ```python model_copy(self, **kwargs: Any) -> ProxyPrompt @@ -207,7 +207,7 @@ model_copy(self, **kwargs: Any) -> ProxyPrompt Override to preserve _backend_name when name changes. -#### `from_mcp_prompt` +#### `from_mcp_prompt` ```python from_mcp_prompt(cls, client_factory: ClientFactoryT, mcp_prompt: mcp.types.Prompt) -> ProxyPrompt @@ -216,7 +216,7 @@ from_mcp_prompt(cls, client_factory: ClientFactoryT, mcp_prompt: mcp.types.Promp Factory method to create a ProxyPrompt from a raw MCP prompt schema. 
-#### `render` +#### `render` ```python render(self, arguments: dict[str, Any]) -> PromptResult @@ -225,13 +225,13 @@ render(self, arguments: dict[str, Any]) -> PromptResult Render the prompt by making a call through the client. -#### `get_span_attributes` +#### `get_span_attributes` ```python get_span_attributes(self) -> dict[str, Any] ``` -### `ProxyProvider` +### `ProxyProvider` Provider that proxies to a remote MCP server via a client factory. @@ -245,7 +245,7 @@ because tasks cannot be executed through a proxy. **Methods:** -#### `get_tasks` +#### `get_tasks` ```python get_tasks(self) -> Sequence[FastMCPComponent] @@ -258,7 +258,7 @@ server lifespan initialization, which would open the client before any context is set. All Proxy* components have task_config.mode="forbidden". -### `FastMCPProxy` +### `FastMCPProxy` A FastMCP server that acts as a proxy to a remote MCP-compliant server. @@ -267,7 +267,7 @@ This is a convenience wrapper that creates a FastMCP server with a ProxyProvider. For more control, use FastMCP with add_provider(ProxyProvider(...)). -### `ProxyClient` +### `ProxyClient` A proxy client that forwards advanced interactions between a remote MCP server and the proxy's connected clients. @@ -275,7 +275,7 @@ A proxy client that forwards advanced interactions between a remote MCP server a Supports forwarding roots, sampling, elicitation, logging, and progress. -### `StatefulProxyClient` +### `StatefulProxyClient` A proxy client that provides a stateful client factory for the proxy server. @@ -286,10 +286,17 @@ And it will be disconnected when the session is exited. This is useful to proxy a stateful mcp server such as the Playwright MCP server. Note that it is essential to ensure that the proxy server itself is also stateful. +Because session reuse means the receive-loop task inherits a stale +``request_ctx`` ContextVar snapshot, the default proxy handlers are +replaced with versions that restore the ContextVar before forwarding. 
+``ProxyTool.run`` stashes the current ``RequestContext`` in +``_proxy_rc_ref`` before each backend call, and the handlers consult +it to detect (and correct) staleness. + **Methods:** -#### `clear` +#### `clear` ```python clear(self) @@ -298,7 +305,7 @@ clear(self) Clear all cached clients and force disconnect them. -#### `new_stateful` +#### `new_stateful` ```python new_stateful(self) -> Client[ClientTransportT] diff --git a/docs/python-sdk/fastmcp-server-sampling-run.mdx b/docs/python-sdk/fastmcp-server-sampling-run.mdx index a251946ff1..657b9dd307 100644 --- a/docs/python-sdk/fastmcp-server-sampling-run.mdx +++ b/docs/python-sdk/fastmcp-server-sampling-run.mdx @@ -10,7 +10,7 @@ Sampling types and helper functions for FastMCP servers. ## Functions -### `determine_handler_mode` +### `determine_handler_mode` ```python determine_handler_mode(context: Context, needs_tools: bool) -> bool @@ -30,7 +30,7 @@ Determine whether to use fallback handler or client for sampling. - `ValueError`: If client lacks required capability and no fallback configured. -### `call_sampling_handler` +### `call_sampling_handler` ```python call_sampling_handler(context: Context, messages: list[SamplingMessage]) -> CreateMessageResult | CreateMessageResultWithTools @@ -44,10 +44,10 @@ sampling_handler is set via determine_handler_mode(). The checks below are safeguards against internal misuse. -### `execute_tools` +### `execute_tools` ```python -execute_tools(tool_calls: list[ToolUseContent], tool_map: dict[str, SamplingTool], mask_error_details: bool = False) -> list[ToolResultContent] +execute_tools(tool_calls: list[ToolUseContent], tool_map: dict[str, SamplingTool], mask_error_details: bool = False, tool_concurrency: int | None = None) -> list[ToolResultContent] ``` @@ -60,12 +60,18 @@ Execute tool calls and return results. When masked, only generic error messages are returned to the LLM. 
Tools can explicitly raise ToolError to bypass masking when they want to provide specific error messages to the LLM. +- `tool_concurrency`: Controls parallel execution of tools\: +- None (default)\: Sequential execution (one at a time) +- 0\: Unlimited parallel execution +- N > 0\: Execute at most N tools concurrently +If any tool has sequential=True, all tools execute sequentially +regardless of this setting. **Returns:** -- List of tool result content blocks. +- List of tool result content blocks in the same order as tool_calls. -### `prepare_messages` +### `prepare_messages` ```python prepare_messages(messages: str | Sequence[str | SamplingMessage]) -> list[SamplingMessage] @@ -75,17 +81,28 @@ prepare_messages(messages: str | Sequence[str | SamplingMessage]) -> list[Sampli Convert various message formats to a list of SamplingMessage objects. -### `prepare_tools` +### `prepare_tools` ```python -prepare_tools(tools: Sequence[SamplingTool | Callable[..., Any]] | None) -> list[SamplingTool] | None +prepare_tools(tools: Sequence[SamplingTool | FunctionTool | TransformedTool | Callable[..., Any]] | None) -> list[SamplingTool] | None ``` Convert tools to SamplingTool objects. +Accepts SamplingTool instances, FunctionTool instances, TransformedTool instances, +or plain callable functions. FunctionTool and TransformedTool are converted using +from_callable_tool(), while plain functions use from_function(). -### `extract_tool_calls` +**Args:** +- `tools`: Sequence of tools to prepare. Can be SamplingTool, FunctionTool, +TransformedTool, or plain callable functions. + +**Returns:** +- List of SamplingTool instances, or None if tools is None. + + +### `extract_tool_calls` ```python extract_tool_calls(response: CreateMessageResult | CreateMessageResultWithTools) -> list[ToolUseContent] @@ -95,7 +112,7 @@ extract_tool_calls(response: CreateMessageResult | CreateMessageResultWithTools) Extract tool calls from a response. 
-### `create_final_response_tool` +### `create_final_response_tool` ```python create_final_response_tool(result_type: type) -> SamplingTool @@ -108,7 +125,7 @@ This tool is used to capture structured responses from the LLM. The tool's schema is derived from the result_type. -### `sample_step_impl` +### `sample_step_impl` ```python sample_step_impl(context: Context, messages: str | Sequence[str | SamplingMessage]) -> SampleStep @@ -121,7 +138,7 @@ Make a single LLM sampling call. This is a stateless function that makes exactly one LLM call and optionally executes any requested tools. -### `sample_impl` +### `sample_impl` ```python sample_impl(context: Context, messages: str | Sequence[str | SamplingMessage]) -> SamplingResult[ResultT] @@ -137,7 +154,7 @@ provides a final text response. ## Classes -### `SamplingResult` +### `SamplingResult` Result of a sampling operation. @@ -148,7 +165,7 @@ Result of a sampling operation. - `history`: All messages exchanged during sampling. -### `SampleStep` +### `SampleStep` Result of a single sampling call. @@ -158,7 +175,7 @@ Represents what the LLM returned in this step plus the message history. **Methods:** -#### `is_tool_use` +#### `is_tool_use` ```python is_tool_use(self) -> bool @@ -167,7 +184,7 @@ is_tool_use(self) -> bool True if the LLM is requesting tool execution. -#### `text` +#### `text` ```python text(self) -> str | None @@ -176,7 +193,7 @@ text(self) -> str | None Extract text from the response, if available. -#### `tool_calls` +#### `tool_calls` ```python tool_calls(self) -> list[ToolUseContent] diff --git a/docs/python-sdk/fastmcp-server-sampling-sampling_tool.mdx b/docs/python-sdk/fastmcp-server-sampling-sampling_tool.mdx index 1231624f5f..c1650aa4f2 100644 --- a/docs/python-sdk/fastmcp-server-sampling-sampling_tool.mdx +++ b/docs/python-sdk/fastmcp-server-sampling-sampling_tool.mdx @@ -10,7 +10,7 @@ SamplingTool for use during LLM sampling requests. 
## Classes -### `SamplingTool` +### `SamplingTool` A tool that can be used during LLM sampling. @@ -37,7 +37,7 @@ Create a SamplingTool explicitly when you need custom name/description: **Methods:** -#### `run` +#### `run` ```python run(self, arguments: dict[str, Any] | None = None) -> Any @@ -52,7 +52,7 @@ Execute the tool with the given arguments. - The result of executing the tool function. -#### `from_function` +#### `from_function` ```python from_function(cls, fn: Callable[..., Any]) -> SamplingTool @@ -67,6 +67,10 @@ the tool's parameters. Type hints are used to determine parameter types. - `fn`: The function to create a tool from. - `name`: Optional name override. Defaults to the function's name. - `description`: Optional description override. Defaults to the function's docstring. +- `sequential`: If True, this tool requires sequential execution and prevents +parallel execution of all tools in the batch. Set to True for tools +with shared state, file writes, or other operations that cannot run +concurrently. Defaults to False. **Returns:** - A SamplingTool wrapping the function. @@ -74,3 +78,24 @@ the tool's parameters. Type hints are used to determine parameter types. **Raises:** - `ValueError`: If the function is a lambda without a name override. + +#### `from_callable_tool` + +```python +from_callable_tool(cls, tool: FunctionTool | TransformedTool) -> SamplingTool +``` + +Create a SamplingTool from a FunctionTool or TransformedTool. + +Reuses existing server tools in sampling contexts. For TransformedTool, +the tool's .run() method is used to ensure proper argument transformation, +and the ToolResult is automatically unwrapped. + +**Args:** +- `tool`: A FunctionTool or TransformedTool to convert. +- `name`: Optional name override. Defaults to tool.name. +- `description`: Optional description override. Defaults to tool.description. + +**Raises:** +- `TypeError`: If the tool is not a FunctionTool or TransformedTool. 
+ diff --git a/docs/python-sdk/fastmcp-server-server.mdx b/docs/python-sdk/fastmcp-server-server.mdx index 004fc53c8d..dddeb658ac 100644 --- a/docs/python-sdk/fastmcp-server-server.mdx +++ b/docs/python-sdk/fastmcp-server-server.mdx @@ -10,7 +10,7 @@ FastMCP - A more ergonomic interface for MCP servers. ## Functions -### `default_lifespan` +### `default_lifespan` ```python default_lifespan(server: FastMCP[LifespanResultT]) -> AsyncIterator[Any] @@ -26,7 +26,7 @@ Default lifespan context manager that does nothing. - An empty dictionary as the lifespan result. -### `create_proxy` +### `create_proxy` ```python create_proxy(target: Client[ClientTransportT] | ClientTransport | FastMCP[Any] | FastMCP1Server | AnyUrl | Path | MCPConfig | dict[str, Any] | str, **settings: Any) -> FastMCPProxy @@ -54,65 +54,74 @@ use `FastMCPProxy` or `ProxyProvider` directly from `fastmcp.server.providers.pr ## Classes -### `StateValue` +### `StateValue` Wrapper for stored context state values. -### `FastMCP` +### `FastMCP` **Methods:** -#### `settings` - -```python -settings(self) -> Settings -``` - -#### `name` +#### `name` ```python name(self) -> str ``` -#### `instructions` +#### `instructions` ```python instructions(self) -> str | None ``` -#### `instructions` +#### `instructions` ```python instructions(self, value: str | None) -> None ``` -#### `version` +#### `version` ```python version(self) -> str | None ``` -#### `website_url` +#### `website_url` ```python website_url(self) -> str | None ``` -#### `icons` +#### `icons` ```python icons(self) -> list[mcp.types.Icon] ``` -#### `add_middleware` +#### `local_provider` + +```python +local_provider(self) -> LocalProvider +``` + +The server's local provider, which stores directly-registered components. 
+ +Use this to remove components: + + mcp.local_provider.remove_tool("my_tool") + mcp.local_provider.remove_resource("data://info") + mcp.local_provider.remove_prompt("my_prompt") + + +#### `add_middleware` ```python add_middleware(self, middleware: Middleware) -> None ``` -#### `add_provider` +#### `add_provider` ```python add_provider(self, provider: Provider) -> None @@ -132,7 +141,7 @@ always take precedence over providers. - Prompts become "namespace_promptname" -#### `get_tasks` +#### `get_tasks` ```python get_tasks(self) -> Sequence[FastMCPComponent] @@ -144,7 +153,7 @@ Overrides AggregateProvider.get_tasks() to apply server-level transforms after aggregation. AggregateProvider handles provider-level namespacing. -#### `add_transform` +#### `add_transform` ```python add_transform(self, transform: Transform) -> None @@ -159,7 +168,7 @@ They transform tools, resources, and prompts from ALL providers. - `transform`: The transform to add. -#### `add_tool_transformation` +#### `add_tool_transformation` ```python add_tool_transformation(self, tool_name: str, transformation: ToolTransformConfig) -> None @@ -171,7 +180,7 @@ Add a tool transformation. Use ``add_transform(ToolTransform({...}))`` instead. -#### `remove_tool_transformation` +#### `remove_tool_transformation` ```python remove_tool_transformation(self, _tool_name: str) -> None @@ -183,7 +192,7 @@ Remove a tool transformation. Tool transformations are now immutable. Use enable/disable controls instead. -#### `list_tools` +#### `list_tools` ```python list_tools(self) -> Sequence[Tool] @@ -196,7 +205,7 @@ and middleware execution. Returns all versions (no deduplication). Protocol handlers deduplicate for MCP wire format. -#### `get_tool` +#### `get_tool` ```python get_tool(self, name: str, version: VersionSpec | None = None) -> Tool | None @@ -216,7 +225,7 @@ session transforms can override provider-level disables. - The tool if found and enabled, None otherwise. 
-#### `list_resources` +#### `list_resources` ```python list_resources(self) -> Sequence[Resource] @@ -229,7 +238,7 @@ and middleware execution. Returns all versions (no deduplication). Protocol handlers deduplicate for MCP wire format. -#### `get_resource` +#### `get_resource` ```python get_resource(self, uri: str, version: VersionSpec | None = None) -> Resource | None @@ -248,7 +257,7 @@ transforms (including session-level) have been applied. - The resource if found and enabled, None otherwise. -#### `list_resource_templates` +#### `list_resource_templates` ```python list_resource_templates(self) -> Sequence[ResourceTemplate] @@ -261,7 +270,7 @@ auth filtering, and middleware execution. Returns all versions (no deduplication Protocol handlers deduplicate for MCP wire format. -#### `get_resource_template` +#### `get_resource_template` ```python get_resource_template(self, uri: str, version: VersionSpec | None = None) -> ResourceTemplate | None @@ -280,7 +289,7 @@ all transforms (including session-level) have been applied. - The template if found and enabled, None otherwise. -#### `list_prompts` +#### `list_prompts` ```python list_prompts(self) -> Sequence[Prompt] @@ -293,7 +302,7 @@ and middleware execution. Returns all versions (no deduplication). Protocol handlers deduplicate for MCP wire format. -#### `get_prompt` +#### `get_prompt` ```python get_prompt(self, name: str, version: VersionSpec | None = None) -> Prompt | None @@ -312,19 +321,19 @@ transforms (including session-level) have been applied. - The prompt if found and enabled, None otherwise. 
-#### `call_tool` +#### `call_tool` ```python call_tool(self, name: str, arguments: dict[str, Any] | None = None) -> ToolResult ``` -#### `call_tool` +#### `call_tool` ```python call_tool(self, name: str, arguments: dict[str, Any] | None = None) -> mcp.types.CreateTaskResult ``` -#### `call_tool` +#### `call_tool` ```python call_tool(self, name: str, arguments: dict[str, Any] | None = None) -> ToolResult | mcp.types.CreateTaskResult @@ -354,19 +363,19 @@ return ToolResult. - `ValidationError`: If arguments fail validation -#### `read_resource` +#### `read_resource` ```python read_resource(self, uri: str) -> ResourceResult ``` -#### `read_resource` +#### `read_resource` ```python read_resource(self, uri: str) -> mcp.types.CreateTaskResult ``` -#### `read_resource` +#### `read_resource` ```python read_resource(self, uri: str) -> ResourceResult | mcp.types.CreateTaskResult @@ -395,19 +404,19 @@ return ResourceResult. - `ResourceError`: If resource read fails -#### `render_prompt` +#### `render_prompt` ```python render_prompt(self, name: str, arguments: dict[str, Any] | None = None) -> PromptResult ``` -#### `render_prompt` +#### `render_prompt` ```python render_prompt(self, name: str, arguments: dict[str, Any] | None = None) -> mcp.types.CreateTaskResult ``` -#### `render_prompt` +#### `render_prompt` ```python render_prompt(self, name: str, arguments: dict[str, Any] | None = None) -> PromptResult | mcp.types.CreateTaskResult @@ -437,7 +446,7 @@ return PromptResult. - `PromptError`: If prompt rendering fails -#### `add_tool` +#### `add_tool` ```python add_tool(self, tool: Tool | Callable[..., Any]) -> Tool @@ -455,7 +464,7 @@ with the Context type annotation. See the @tool decorator for examples. - The tool instance that was added to the server. 
-#### `remove_tool` +#### `remove_tool` ```python remove_tool(self, name: str, version: str | None = None) -> None @@ -463,6 +472,9 @@ remove_tool(self, name: str, version: str | None = None) -> None Remove tool(s) from the server. +.. deprecated:: + Use ``mcp.local_provider.remove_tool(name)`` instead. + **Args:** - `name`: The name of the tool to remove. - `version`: If None, removes ALL versions. If specified, removes only that version. @@ -471,19 +483,19 @@ Remove tool(s) from the server. - `NotFoundError`: If no matching tool is found. -#### `tool` +#### `tool` ```python tool(self, name_or_fn: AnyFunction) -> FunctionTool ``` -#### `tool` +#### `tool` ```python tool(self, name_or_fn: str | None = None) -> Callable[[AnyFunction], FunctionTool] ``` -#### `tool` +#### `tool` ```python tool(self, name_or_fn: str | AnyFunction | None = None) -> Callable[[AnyFunction], FunctionTool] | FunctionTool | partial[Callable[[AnyFunction], FunctionTool] | FunctionTool] @@ -539,7 +551,7 @@ server.tool(my_function, name="custom_name") ``` -#### `add_resource` +#### `add_resource` ```python add_resource(self, resource: Resource | Callable[..., Any]) -> Resource | ResourceTemplate @@ -554,7 +566,7 @@ Add a resource to the server. - The resource instance that was added to the server. -#### `add_template` +#### `add_template` ```python add_template(self, template: ResourceTemplate) -> ResourceTemplate @@ -569,7 +581,7 @@ Add a resource template to the server. - The template instance that was added to the server. -#### `resource` +#### `resource` ```python resource(self, uri: str) -> Callable[[AnyFunction], Resource | ResourceTemplate | AnyFunction] @@ -628,7 +640,7 @@ async def get_weather(city: str) -> str: ``` -#### `add_prompt` +#### `add_prompt` ```python add_prompt(self, prompt: Prompt | Callable[..., Any]) -> Prompt @@ -643,19 +655,19 @@ Add a prompt to the server. - The prompt instance that was added to the server. 
-#### `prompt` +#### `prompt` ```python prompt(self, name_or_fn: AnyFunction) -> FunctionPrompt ``` -#### `prompt` +#### `prompt` ```python prompt(self, name_or_fn: str | None = None) -> Callable[[AnyFunction], FunctionPrompt] ``` -#### `prompt` +#### `prompt` ```python prompt(self, name_or_fn: str | AnyFunction | None = None) -> Callable[[AnyFunction], FunctionPrompt] | FunctionPrompt | partial[Callable[[AnyFunction], FunctionPrompt] | FunctionPrompt] @@ -732,7 +744,7 @@ Decorator to register a prompt. ``` -#### `mount` +#### `mount` ```python mount(self, server: FastMCP[LifespanResultT], namespace: str | None = None, as_proxy: bool | None = None, tool_names: dict[str, str] | None = None, prefix: str | None = None) -> None @@ -779,7 +791,7 @@ mounted server. - `prefix`: Deprecated. Use namespace instead. -#### `import_server` +#### `import_server` ```python import_server(self, server: FastMCP[LifespanResultT], prefix: str | None = None) -> None @@ -820,10 +832,10 @@ templates, and prompts are imported with their original names. objects are imported with their original names. -#### `from_openapi` +#### `from_openapi` ```python -from_openapi(cls, openapi_spec: dict[str, Any], client: httpx.AsyncClient | None = None, name: str = 'OpenAPI Server', route_maps: list[RouteMap] | None = None, route_map_fn: OpenAPIRouteMapFn | None = None, mcp_component_fn: OpenAPIComponentFn | None = None, mcp_names: dict[str, str] | None = None, tags: set[str] | None = None, **settings: Any) -> Self +from_openapi(cls, openapi_spec: dict[str, Any], client: httpx.AsyncClient | None = None, name: str = 'OpenAPI Server', route_maps: list[RouteMap] | None = None, route_map_fn: OpenAPIRouteMapFn | None = None, mcp_component_fn: OpenAPIComponentFn | None = None, mcp_names: dict[str, str] | None = None, tags: set[str] | None = None, validate_output: bool = True, **settings: Any) -> Self ``` Create a FastMCP server from an OpenAPI specification. 
@@ -839,13 +851,17 @@ server URL from the OpenAPI spec with a 30-second timeout. - `mcp_component_fn`: Optional callable for component customization - `mcp_names`: Optional dictionary mapping operationId to component names - `tags`: Optional set of tags to add to all components +- `validate_output`: If True (default), tools use the output schema +extracted from the OpenAPI spec for response validation. If +False, a permissive schema is used instead, allowing any +response structure while still returning structured JSON. - `**settings`: Additional settings passed to FastMCP **Returns:** - A FastMCP server with an OpenAPIProvider attached. -#### `from_fastapi` +#### `from_fastapi` ```python from_fastapi(cls, app: Any, name: str | None = None, route_maps: list[RouteMap] | None = None, route_map_fn: OpenAPIRouteMapFn | None = None, mcp_component_fn: OpenAPIComponentFn | None = None, mcp_names: dict[str, str] | None = None, httpx_client_kwargs: dict[str, Any] | None = None, tags: set[str] | None = None, **settings: Any) -> Self @@ -869,7 +885,7 @@ Use this to configure timeout and other client settings. - A FastMCP server with an OpenAPIProvider attached. -#### `as_proxy` +#### `as_proxy` ```python as_proxy(cls, backend: Client[ClientTransportT] | ClientTransport | FastMCP[Any] | FastMCP1Server | AnyUrl | Path | MCPConfig | dict[str, Any] | str, **settings: Any) -> FastMCPProxy @@ -887,7 +903,7 @@ instance or any value accepted as the `transport` argument of `fastmcp.client.Client` constructor. -#### `generate_name` +#### `generate_name` ```python generate_name(cls, name: str | None = None) -> str diff --git a/docs/python-sdk/fastmcp-server-tasks-elicitation.mdx b/docs/python-sdk/fastmcp-server-tasks-elicitation.mdx index 1b06baa8e1..3bdd697ef1 100644 --- a/docs/python-sdk/fastmcp-server-tasks-elicitation.mdx +++ b/docs/python-sdk/fastmcp-server-tasks-elicitation.mdx @@ -13,7 +13,7 @@ in Docket workers. 
Unlike regular MCP requests, background tasks don't have an active request context, so elicitation requires special handling: 1. Set task status to "input_required" via Redis -2. Send notifications/tasks/updated with elicitation metadata +2. Send notifications/tasks/status with elicitation metadata 3. Wait for client to send input via tasks/sendInput 4. Resume task execution with the provided input @@ -26,7 +26,7 @@ internal APIs for background task coordination. ### `elicit_for_task` ```python -elicit_for_task(task_id: str, session: ServerSession, message: str, schema: dict[str, Any], fastmcp: FastMCP) -> mcp.types.ElicitResult +elicit_for_task(task_id: str, session: ServerSession | None, message: str, schema: dict[str, Any], fastmcp: FastMCP) -> mcp.types.ElicitResult ``` @@ -50,7 +50,29 @@ in a Docket worker context where there's no active MCP request. - `McpError`: If the elicitation request fails -### `handle_task_input` +### `relay_elicitation` + +```python +relay_elicitation(session: ServerSession, session_id: str, task_id: str, elicitation: dict[str, Any], fastmcp: FastMCP) -> None +``` + + +Relay elicitation from a background task worker to the client. + +Called by the notification subscriber when it detects an input_required +notification with elicitation metadata. Sends a standard elicitation/create +request to the client session, then uses handle_task_input() to push the +response to Redis so the blocked worker can resume. 
+ +**Args:** +- `session`: MCP ServerSession +- `session_id`: Session identifier +- `task_id`: Background task ID +- `elicitation`: Elicitation metadata (message, requestedSchema) +- `fastmcp`: FastMCP server instance + + +### `handle_task_input` ```python handle_task_input(task_id: str, session_id: str, action: str, content: dict[str, Any] | None, fastmcp: FastMCP) -> bool diff --git a/docs/python-sdk/fastmcp-server-tasks-handlers.mdx b/docs/python-sdk/fastmcp-server-tasks-handlers.mdx index bee4b0b931..94e094174a 100644 --- a/docs/python-sdk/fastmcp-server-tasks-handlers.mdx +++ b/docs/python-sdk/fastmcp-server-tasks-handlers.mdx @@ -13,7 +13,7 @@ Handles queuing tool/prompt/resource executions to Docket as background tasks. ## Functions -### `submit_to_docket` +### `submit_to_docket` ```python submit_to_docket(task_type: Literal['tool', 'resource', 'template', 'prompt'], key: str, component: Tool | Resource | ResourceTemplate | Prompt, arguments: dict[str, Any] | None = None, task_meta: TaskMeta | None = None) -> mcp.types.CreateTaskResult diff --git a/docs/python-sdk/fastmcp-server-tasks-notifications.mdx b/docs/python-sdk/fastmcp-server-tasks-notifications.mdx new file mode 100644 index 0000000000..6652d76001 --- /dev/null +++ b/docs/python-sdk/fastmcp-server-tasks-notifications.mdx @@ -0,0 +1,113 @@ +--- +title: notifications +sidebarTitle: notifications +--- + +# `fastmcp.server.tasks.notifications` + + +Distributed notification queue for background task events (SEP-1686). + +Enables distributed Docket workers to send MCP notifications to clients +without holding session references. Workers push to a Redis queue, +the MCP server process subscribes and forwards to the client's session. 
+ +Pattern: Fire-and-forward with retry +- One queue per session_id +- LPUSH/BRPOP for reliable ordered delivery +- Retry up to 3 times on delivery failure, then discard +- TTL-based expiration for stale messages + +Note: Docket's execution.subscribe() handles task state/progress events via +Redis Pub/Sub. This module handles elicitation-specific notifications that +require reliable delivery (input_required prompts, cancel signals). + + +## Functions + +### `push_notification` + +```python +push_notification(session_id: str, notification: dict[str, Any], docket: Docket) -> None +``` + + +Push notification to session's queue (called from Docket worker). + +Used for elicitation-specific notifications (input_required, cancel) +that need reliable delivery across distributed processes. + +**Args:** +- `session_id`: Target session's identifier +- `notification`: MCP notification dict (method, params, _meta) +- `docket`: Docket instance for Redis access + + +### `notification_subscriber_loop` + +```python +notification_subscriber_loop(session_id: str, session: ServerSession, docket: Docket, fastmcp: FastMCP) -> None +``` + + +Subscribe to notification queue and forward to session. + +Runs in the MCP server process. Bridges distributed workers to clients. + +This loop: +1. Maintains a heartbeat (active subscriber marker for debugging) +2. Blocks on BRPOP waiting for notifications +3. Forwards notifications to the client's session +4. Retries failed deliveries, then discards (no dead-letter queue) + +**Args:** +- `session_id`: Session identifier to subscribe to +- `session`: MCP ServerSession for sending notifications +- `docket`: Docket instance for Redis access +- `fastmcp`: FastMCP server instance (for elicitation relay) + + +### `ensure_subscriber_running` + +```python +ensure_subscriber_running(session_id: str, session: ServerSession, docket: Docket, fastmcp: FastMCP) -> None +``` + + +Start notification subscriber if not already running (idempotent). 
+ +Subscriber is created on first task submission and cleaned up on disconnect. +Safe to call multiple times for the same session. + +**Args:** +- `session_id`: Session identifier +- `session`: MCP ServerSession +- `docket`: Docket instance +- `fastmcp`: FastMCP server instance (for elicitation relay) + + +### `stop_subscriber` + +```python +stop_subscriber(session_id: str) -> None +``` + + +Stop notification subscriber for a session. + +Called when session disconnects. Pending messages remain in queue +for delivery if client reconnects (with TTL expiration). + +**Args:** +- `session_id`: Session identifier + + +### `get_subscriber_count` + +```python +get_subscriber_count() -> int +``` + + +Get number of active subscribers (for monitoring). + diff --git a/docs/python-sdk/fastmcp-settings.mdx b/docs/python-sdk/fastmcp-settings.mdx index 8c6470a39d..6b630615a0 100644 --- a/docs/python-sdk/fastmcp-settings.mdx +++ b/docs/python-sdk/fastmcp-settings.mdx @@ -7,15 +7,13 @@ sidebarTitle: settings ## Classes -### `DocketSettings` +### `DocketSettings` Docket worker configuration. -### `ExperimentalSettings` - -### `Settings` +### `Settings` FastMCP settings. @@ -23,7 +21,7 @@ FastMCP settings. **Methods:** -#### `get_setting` +#### `get_setting` ```python get_setting(self, attr: str) -> Any @@ -33,7 +31,7 @@ Get a setting. If the setting contains one or more `__`, it will be treated as a nested setting. -#### `set_setting` +#### `set_setting` ```python set_setting(self, attr: str, value: Any) -> None @@ -43,7 +41,7 @@ Set a setting. If the setting contains one or more `__`, it will be treated as a nested setting. 
-#### `normalize_log_level` +#### `normalize_log_level` ```python normalize_log_level(cls, v) diff --git a/docs/python-sdk/fastmcp-tools-function_tool.mdx b/docs/python-sdk/fastmcp-tools-function_tool.mdx index bd66818a0d..4f910a354a 100644 --- a/docs/python-sdk/fastmcp-tools-function_tool.mdx +++ b/docs/python-sdk/fastmcp-tools-function_tool.mdx @@ -10,7 +10,7 @@ Standalone @tool decorator for FastMCP. ## Functions -### `tool` +### `tool` ```python tool(name_or_fn: str | Callable[..., Any] | None = None) -> Any @@ -37,11 +37,11 @@ Protocol for functions decorated with @tool. Metadata attached to functions by the @tool decorator. -### `FunctionTool` +### `FunctionTool` **Methods:** -#### `to_mcp_tool` +#### `to_mcp_tool` ```python to_mcp_tool(self, **overrides: Any) -> mcp.types.Tool @@ -52,7 +52,7 @@ Convert the FastMCP tool to an MCP tool. Extends the base implementation to add task execution mode if enabled. -#### `from_function` +#### `from_function` ```python from_function(cls, fn: Callable[..., Any]) -> FunctionTool @@ -68,7 +68,7 @@ individual parameters must not be passed. Cannot be used together with metadata parameter. -#### `run` +#### `run` ```python run(self, arguments: dict[str, Any]) -> ToolResult @@ -77,7 +77,7 @@ run(self, arguments: dict[str, Any]) -> ToolResult Run the tool with arguments. -#### `register_with_docket` +#### `register_with_docket` ```python register_with_docket(self, docket: Docket) -> None @@ -89,7 +89,7 @@ FunctionTool registers the underlying function, which has the user's Depends parameters for docket to resolve. 
-#### `add_to_docket` +#### `add_to_docket` ```python add_to_docket(self, docket: Docket, arguments: dict[str, Any], **kwargs: Any) -> Execution diff --git a/docs/python-sdk/fastmcp-tools-tool.mdx b/docs/python-sdk/fastmcp-tools-tool.mdx index 1068a77160..f13d63f1c5 100644 --- a/docs/python-sdk/fastmcp-tools-tool.mdx +++ b/docs/python-sdk/fastmcp-tools-tool.mdx @@ -7,7 +7,7 @@ sidebarTitle: tool ## Functions -### `default_serializer` +### `default_serializer` ```python default_serializer(data: Any) -> str @@ -15,17 +15,17 @@ default_serializer(data: Any) -> str ## Classes -### `ToolResult` +### `ToolResult` **Methods:** -#### `to_mcp_result` +#### `to_mcp_result` ```python to_mcp_result(self) -> list[ContentBlock] | tuple[list[ContentBlock], dict[str, Any]] | CallToolResult ``` -### `Tool` +### `Tool` Internal tool registration info. @@ -33,7 +33,7 @@ Internal tool registration info. **Methods:** -#### `to_mcp_tool` +#### `to_mcp_tool` ```python to_mcp_tool(self, **overrides: Any) -> MCPTool @@ -42,7 +42,7 @@ to_mcp_tool(self, **overrides: Any) -> MCPTool Convert the FastMCP tool to an MCP tool. -#### `from_function` +#### `from_function` ```python from_function(cls, fn: Callable[..., Any]) -> FunctionTool @@ -51,7 +51,7 @@ from_function(cls, fn: Callable[..., Any]) -> FunctionTool Create a Tool from a function. -#### `run` +#### `run` ```python run(self, arguments: dict[str, Any]) -> ToolResult @@ -66,7 +66,7 @@ implemented by subclasses. (list of ContentBlocks, dict of structured output). -#### `convert_result` +#### `convert_result` ```python convert_result(self, raw_value: Any) -> ToolResult @@ -78,7 +78,7 @@ Handles ToolResult passthrough and converts raw values using the tool's attributes (serializer, output_schema) for proper conversion. 
-#### `register_with_docket` +#### `register_with_docket` ```python register_with_docket(self, docket: Docket) -> None @@ -87,7 +87,7 @@ register_with_docket(self, docket: Docket) -> None Register this tool with docket for background execution. -#### `add_to_docket` +#### `add_to_docket` ```python add_to_docket(self, docket: Docket, arguments: dict[str, Any], **kwargs: Any) -> Execution @@ -103,13 +103,13 @@ Schedule this tool for background execution via docket. - `**kwargs`: Additional kwargs passed to docket.add() -#### `from_tool` +#### `from_tool` ```python from_tool(cls, tool: Tool) -> TransformedTool ``` -#### `get_span_attributes` +#### `get_span_attributes` ```python get_span_attributes(self) -> dict[str, Any] diff --git a/docs/python-sdk/fastmcp-utilities-json_schema.mdx b/docs/python-sdk/fastmcp-utilities-json_schema.mdx index aa8c7b2d57..f45ea01618 100644 --- a/docs/python-sdk/fastmcp-utilities-json_schema.mdx +++ b/docs/python-sdk/fastmcp-utilities-json_schema.mdx @@ -60,17 +60,12 @@ the referenced definition while preserving $defs for nested references. ### `compress_schema` ```python -compress_schema(schema: dict[str, Any], prune_params: list[str] | None = None, prune_additional_properties: bool = False, prune_titles: bool = False) -> dict[str, Any] +compress_schema(schema: dict[str, Any], prune_params: list[str] | None = None, prune_additional_properties: bool = False, prune_titles: bool = False, dereference: bool = False) -> dict[str, Any] ``` Compress and optimize a JSON schema for MCP compatibility. -This function dereferences all $ref entries (inlining definitions) to ensure -compatibility with MCP clients that don't properly handle $ref in schemas -(e.g., VS Code Copilot). It also applies various optimizations to reduce -schema size. - **Args:** - `schema`: The schema to compress - `prune_params`: List of parameter names to remove from properties @@ -78,4 +73,7 @@ schema size. 
Defaults to False to maintain MCP client compatibility, as some clients (e.g., Claude) require additionalProperties\: false for strict validation. - `prune_titles`: Whether to remove title fields from the schema +- `dereference`: Whether to dereference $ref by inlining definitions. +Defaults to False; dereferencing is typically handled by +middleware at serve-time instead. diff --git a/docs/python-sdk/fastmcp-utilities-openapi-formatters.mdx b/docs/python-sdk/fastmcp-utilities-openapi-formatters.mdx index 1b725fc5a3..0b9d123693 100644 --- a/docs/python-sdk/fastmcp-utilities-openapi-formatters.mdx +++ b/docs/python-sdk/fastmcp-utilities-openapi-formatters.mdx @@ -72,26 +72,7 @@ format_json_for_description(data: Any, indent: int = 2) -> str Formats Python data as a JSON string block for Markdown. -### `format_simple_description` - -```python -format_simple_description(base_description: str, parameters: list[ParameterInfo] | None = None, request_body: RequestBodyInfo | None = None) -> str -``` - - -Formats a simple description for MCP objects (tools, resources, prompts). -Excludes response details, examples, and verbose status codes. - -**Args:** -- `base_description`: The initial description to be formatted. -- `parameters`: A list of parameter information. -- `request_body`: Information about the request body. - -**Returns:** -- The formatted description string with minimal details. 
- - -### `format_description_with_responses` +### `format_description_with_responses` ```python format_description_with_responses(base_description: str, responses: dict[str, Any], parameters: list[ParameterInfo] | None = None, request_body: RequestBodyInfo | None = None) -> str diff --git a/docs/servers/authorization.mdx b/docs/servers/authorization.mdx index 0ad0d569ee..a48d2a9e83 100644 --- a/docs/servers/authorization.mdx +++ b/docs/servers/authorization.mdx @@ -24,7 +24,7 @@ When an `AuthProvider` is configured, all requests to the MCP endpoint must carr ## Auth Checks -An auth check is any callable that accepts an `AuthContext` and returns a boolean. The `AuthContext` provides access to the current token (if any) and the component being accessed. +An auth check is any callable that accepts an `AuthContext` and returns a boolean. Auth checks can be synchronous or asynchronous, so checks that need to perform async operations (like reading server state or calling external services) work naturally. ```python from fastmcp.server.auth import AuthContext @@ -137,6 +137,34 @@ def advanced_feature() -> str: return "Advanced feature" ``` +### Async Auth Checks + +Auth checks can be `async` functions, which is useful when the authorization decision depends on asynchronous operations like reading server state or querying external services. 
+ +```python +from fastmcp import FastMCP +from fastmcp.server.auth import AuthContext + +mcp = FastMCP("Async Auth Server") + +async def check_user_permissions(ctx: AuthContext) -> bool: + """Async auth check that reads server state.""" + if ctx.token is None: + return False + user_id = ctx.token.claims.get("sub") + # Async operations work naturally in auth checks + permissions = await fetch_user_permissions(user_id) + return "admin" in permissions + +@mcp.tool(auth=check_user_permissions) +def admin_tool() -> str: + return "Admin action completed" +``` + +Sync and async checks can be freely combined in a list — each check is handled according to its type. + +### Error Handling + Auth checks can raise exceptions for explicit denial with custom messages: - **`AuthorizationError`**: Propagates with its custom message, useful for explaining why access was denied @@ -346,7 +374,7 @@ def require_matching_tag(ctx: AuthContext) -> bool: from fastmcp.server.auth import ( AccessToken, # Token with .token, .client_id, .scopes, .expires_at, .claims AuthContext, # Context with .token, .component - AuthCheck, # Type alias: Callable[[AuthContext], bool] + AuthCheck, # Type alias: sync or async Callable[[AuthContext], bool] require_scopes, # Built-in: requires specific scopes restrict_tag, # Built-in: tag-based scope requirements run_auth_checks, # Utility: run checks with AND logic diff --git a/docs/servers/context.mdx b/docs/servers/context.mdx index 78e71e53c7..a10161baf7 100644 --- a/docs/servers/context.mdx +++ b/docs/servers/context.mdx @@ -238,14 +238,32 @@ async def get_counter(ctx: Context) -> int: Each client session has its own isolated state—two different clients calling `increment_counter` will each have their own counter.
**Method signatures:** -- **`await ctx.set_state(key: str, value: Any) -> None`**: Store a value in session state -- **`await ctx.get_state(key: str) -> Any`**: Retrieve a value (returns None if not found) -- **`await ctx.delete_state(key: str) -> None`**: Remove a value from session state +- **`await ctx.set_state(key, value, *, serializable=True)`**: Store a value in session state +- **`await ctx.get_state(key)`**: Retrieve a value (returns None if not found) +- **`await ctx.delete_state(key)`**: Remove a value from session state State methods are async and require `await`. State expires after 1 day to prevent unbounded memory growth. +#### Non-Serializable Values + +By default, state values must be JSON-serializable (dicts, lists, strings, numbers, etc.) so they can be persisted across requests. For non-serializable values like HTTP clients or database connections, pass `serializable=False`: + +```python +@mcp.tool +async def my_tool(ctx: Context) -> str: + # This object can't be JSON-serialized + client = SomeHTTPClient(base_url="https://api.example.com") + await ctx.set_state("client", client, serializable=False) + + # Retrieve it later in the same request + client = await ctx.get_state("client") + return await client.fetch("/data") +``` + +Values stored with `serializable=False` only live for the current MCP request (a single tool call, resource read, or prompt render). They will not be available in subsequent requests within the session. + #### Custom Storage Backends By default, session state uses an in-memory store suitable for single-server deployments. 
For distributed or serverless deployments, provide a custom storage backend: diff --git a/docs/servers/dependency-injection.mdx b/docs/servers/dependency-injection.mdx index d986bc52a2..27dd3fd0cc 100644 --- a/docs/servers/dependency-injection.mdx +++ b/docs/servers/dependency-injection.mdx @@ -237,6 +237,37 @@ The `AccessToken` object provides: - **`expires_at`**: Token expiration timestamp (if available) - **`claims`**: Dictionary of all token claims (JWT claims or provider-specific data) +### Token Claims + +When you need just one specific value from the token—like a user ID or tenant identifier—`TokenClaim()` extracts it directly without needing the full token object. + +```python +from fastmcp import FastMCP +from fastmcp.server.dependencies import TokenClaim + +mcp = FastMCP("Demo") + + +@mcp.tool +async def add_expense( + amount: float, + user_id: str = TokenClaim("oid"), # Azure object ID +) -> dict: + await db.insert({"user_id": user_id, "amount": amount}) + return {"status": "created", "user_id": user_id} +``` + +`TokenClaim()` raises a `RuntimeError` if the claim doesn't exist, listing available claims to help with debugging.
+ +Common claims vary by identity provider: + +| Provider | User ID Claim | Email Claim | Name Claim | +|----------|--------------|-------------|------------| +| Azure/Entra | `oid` | `email` | `name` | +| GitHub | `sub` | `email` | `name` | +| Google | `sub` | `email` | `name` | +| Auth0 | `sub` | `email` | `name` | + ### Background Task Dependencies diff --git a/docs/servers/providers/local.mdx b/docs/servers/providers/local.mdx index ae69d90636..86726655ad 100644 --- a/docs/servers/providers/local.mdx +++ b/docs/servers/providers/local.mdx @@ -81,9 +81,9 @@ mcp.add_prompt(my_prompt) Remove components by name or URI: ```python -mcp.remove_tool("my_tool") -mcp.remove_resource("data://info") -mcp.remove_prompt("my_prompt") +mcp.local_provider.remove_tool("my_tool") +mcp.local_provider.remove_resource("data://info") +mcp.local_provider.remove_prompt("my_prompt") ``` ## Duplicate Handling diff --git a/docs/servers/sampling.mdx b/docs/servers/sampling.mdx index f15d50aa32..8ea479eb06 100644 --- a/docs/servers/sampling.mdx +++ b/docs/servers/sampling.mdx @@ -289,6 +289,45 @@ def search(query: str) -> str: `ToolError` messages always pass through to the LLM, making it the escape hatch for errors you want the LLM to see and handle. +### Concurrent Tool Execution + +By default, tools execute sequentially — one at a time, in order.
When your tools are independent (no shared state between them), you can execute them in parallel with `tool_concurrency`: + +```python +result = await ctx.sample( + messages="Research these three topics", + tools=[search, fetch_url], + tool_concurrency=0, # Unlimited parallel execution +) +``` + +The `tool_concurrency` parameter controls how many tools run at once: + +- **`None`** (default): Sequential execution +- **`0`**: Unlimited parallel execution +- **`N > 0`**: Execute at most N tools concurrently + +For tools that must not run concurrently (file writes, shared state mutations, etc.), mark them as `sequential` when creating the `SamplingTool`: + +```python +from fastmcp.server.sampling import SamplingTool + +db_writer = SamplingTool.from_function( + write_to_db, + sequential=True, # Forces all tools in the batch to run sequentially +) + +result = await ctx.sample( + messages="Process this data", + tools=[search, db_writer], + tool_concurrency=0, # Would be parallel, but db_writer forces sequential +) +``` + + +When any tool in a batch has `sequential=True`, the entire batch executes sequentially regardless of `tool_concurrency`. This is a conservative guarantee — if one tool needs ordering, all tools in that batch respect it. + + ### Client Requirements @@ -463,6 +502,10 @@ tool_result = ToolResultContent( If True, mask detailed error messages from tool execution. When None (default), uses the global `settings.mask_error_details` value. Tools can raise `ToolError` to bypass masking and provide specific error messages to the LLM. + + Controls parallel execution of tools. `None` (default) for sequential, `0` for unlimited parallel, or a positive integer for bounded concurrency. If any tool has `sequential=True`, all tools execute sequentially regardless. + + @@ -511,6 +554,10 @@ tool_result = ToolResultContent( If True, mask detailed error messages from tool execution. + + + Controls parallel execution of tools.
`None` (default) for sequential, `0` for unlimited parallel, or a positive integer for bounded concurrency. + diff --git a/docs/servers/tasks.mdx b/docs/servers/tasks.mdx index 2b38dc3f05..ef49046e9e 100644 --- a/docs/servers/tasks.mdx +++ b/docs/servers/tasks.mdx @@ -43,7 +43,7 @@ MCP background tasks are different: they're **protocol-native**. This means MCP Background tasks require the `tasks` extra: ```bash -pip install "fastmcp[tasks]>=3.0.0b2" +pip install "fastmcp[tasks]>=3.0.0rc1" ``` Add `task=True` to any tool, resource, resource template, or prompt decorator. This marks the component as capable of background execution. diff --git a/docs/servers/tools.mdx b/docs/servers/tools.mdx index 98c8c0951a..41d72f470b 100644 --- a/docs/servers/tools.mdx +++ b/docs/servers/tools.mdx @@ -175,6 +175,12 @@ By default, FastMCP converts Python functions into MCP tools by inspecting the f FastMCP automatically dereferences `$ref` entries in tool schemas to ensure compatibility with MCP clients that don't fully support JSON Schema references (e.g., VS Code Copilot, Claude Desktop). This means complex Pydantic models with shared types are inlined in the schema rather than using `$defs` references. + +Dereferencing happens at serve-time via middleware, so your schemas are stored with `$ref` intact and only inlined when sent to clients. 
If you know your clients handle `$ref` correctly and prefer smaller schemas, you can opt out: + +```python +mcp = FastMCP("my-server", dereference_schemas=False) +``` ### Type Annotations @@ -969,7 +975,7 @@ def example_tool() -> str: mcp.add_tool(example_tool) # Sends tools/list_changed notification mcp.disable(keys={"tool:example_tool"}) # Sends tools/list_changed notification mcp.enable(keys={"tool:example_tool"}) # Sends tools/list_changed notification -mcp.remove_tool("example_tool") # Sends tools/list_changed notification +mcp.local_provider.remove_tool("example_tool") # Sends tools/list_changed notification ``` Notifications are only sent when these operations occur within an active MCP request context (e.g., when called from within a tool or other MCP operation). Operations performed during server initialization do not trigger notifications. @@ -1054,7 +1060,7 @@ The duplicate behavior options are: -You can dynamically remove tools from a server using the `remove_tool` method: +You can dynamically remove tools from a server through its [local provider](/servers/providers/local): ```python from fastmcp import FastMCP @@ -1066,7 +1072,7 @@ def calculate_sum(a: int, b: int) -> int: """Add two numbers together.""" return a + b -mcp.remove_tool("calculate_sum") +mcp.local_provider.remove_tool("calculate_sum") ``` ## Versioning diff --git a/docs/servers/versioning.mdx b/docs/servers/versioning.mdx index bdcefb2171..2d180a851c 100644 --- a/docs/servers/versioning.mdx +++ b/docs/servers/versioning.mdx @@ -293,14 +293,14 @@ If the requested version doesn't exist, a `NotFoundError` is raised. ## Removing Versions -The `remove_tool`, `remove_resource`, and `remove_prompt` methods accept an optional `version` parameter that controls what gets removed. +The `remove_tool`, `remove_resource`, and `remove_prompt` methods on the server's [local provider](/servers/providers/local) accept an optional `version` parameter that controls what gets removed. 
```python # Remove ALL versions of a component -mcp.remove_tool("calculate") +mcp.local_provider.remove_tool("calculate") # Remove only a specific version -mcp.remove_tool("calculate", version="1.0") +mcp.local_provider.remove_tool("calculate", version="1.0") ``` When you remove a specific version, other versions remain registered. When you remove without specifying a version, all versions are removed. @@ -332,5 +332,5 @@ Clients automatically see version 2.0 (the highest). During the transition, your Once the migration is complete, remove the old version. ```python -mcp.remove_tool("process_data", version="1.0") +mcp.local_provider.remove_tool("process_data", version="1.0") ``` diff --git a/docs/updates.mdx b/docs/updates.mdx index 26ab3be1e7..4a01afa189 100644 --- a/docs/updates.mdx +++ b/docs/updates.mdx @@ -5,6 +5,48 @@ icon: "sparkles" tag: NEW --- + + +FastMCP 3 RC1 means we believe the API is stable. Beta 2 drew a wave of real-world adoption — production deployments, migration reports, integration testing — and the feedback overwhelmingly confirmed that the architecture works. This release closes gaps that surfaced under load: auth flows that needed to be async, background tasks that needed reliable notification delivery, and APIs still carrying beta-era naming. If nothing unexpected surfaces, this is what 3.0.0 looks like. + +🚨 **Breaking Changes** — The `ui=` parameter is now `app=` with a unified `AppConfig` class, and 16 `FastMCP()` constructor kwargs have been removed after months of deprecation warnings. + +🔐 **Auth Improvements** — Async `auth=` checks, Static Client Registration for servers without DCR, and declarative Azure OBO flows via dependency injection. + +⚡ **Concurrent Sampling** — `context.sample()` can now execute multiple tool calls in parallel with `tool_concurrency=0`. + +📡 **Background Task Notifications** — A distributed Redis queue replaces polling for progress updates and elicitation relay.
+ +✅ **OpenAPI Output Validation** — `validate_output=False` disables strict schema checking for imperfect backend APIs. + + + + + +Beta 2 reflects the huge number of people that kicked the tires on Beta 1. Seven new contributors landed changes, and early migration reports went smoother than expected. Most of Beta 2 is refinement — fixing what people found, filling gaps from real usage, hardening edges — but a few new features landed along the way. + +🖥️ **Client CLI** — `fastmcp list`, `fastmcp call`, `fastmcp discover`, and `fastmcp generate-cli` turn any MCP server into something you can poke at from a terminal. + +🔐 **CIMD** (Client ID Metadata Documents) adds an alternative to Dynamic Client Registration for OAuth. + +📱 **MCP Apps** — Spec-level compliance for the MCP Apps extension with `ui://` resource scheme and typed UI metadata. + +⏳ **Background Task Context** — `Context` now works transparently in Docket workers with Redis-based coordination. + +🛡️ **ResponseLimitingMiddleware** caps tool response sizes with UTF-8-safe truncation. + +🪿 **Goose Integration** — `fastmcp install goose` for one-command server installation into Goose.
+ + + dict[str, str]: async def get_server_details(): """Print information about mounted resources.""" # Print available tools - tools = await app.get_tools() + tools = await app.list_tools() print(f"\nAvailable tools ({len(tools)}):") - for _, tool in tools.items(): + for tool in tools: print(f" - {tool.name}: {tool.description}") # Print available resources @@ -82,20 +82,20 @@ async def get_server_details(): # Distinguish between native and imported resources # Native resources would be those directly in the main app (not prefixed) - resources = await app.get_resources() + resources = await app.list_resources() native_resources = [ - uri - for uri, _ in resources.items() - if urlparse(uri).netloc not in ("weather", "news") + str(r.uri) + for r in resources + if urlparse(str(r.uri)).netloc not in ("weather", "news") ] # Imported resources - categorized by source app weather_resources = [ - uri for uri, _ in resources.items() if urlparse(uri).netloc == "weather" + str(r.uri) for r in resources if urlparse(str(r.uri)).netloc == "weather" ] news_resources = [ - uri for uri, _ in resources.items() if urlparse(uri).netloc == "news" + str(r.uri) for r in resources if urlparse(str(r.uri)).netloc == "news" ] print(f" - Native app resources: {native_resources}") diff --git a/examples/tags_example.py b/examples/tags_example.py index fa79a60df4..c5ff6ae0ec 100644 --- a/examples/tags_example.py +++ b/examples/tags_example.py @@ -60,11 +60,11 @@ async def main(): ], ) - tools = await mcp1.get_tools() - resources = await mcp1.get_resources() + tools = await mcp1.list_tools() + resources = await mcp1.list_resources() - print(f"Tools ({len(tools)}): {', '.join(tools.keys())}") - print(f"Resources ({len(resources)}): {', '.join(resources.keys())}") + print(f"Tools ({len(tools)}): {', '.join(t.name for t in tools)}") + print(f"Resources ({len(resources)}): {', '.join(str(r.uri) for r in resources)}") print("\n=== Example 2: Exclude internal routes ===") @@ -80,11 +80,11 @@ async 
def main(): ], ) - tools = await mcp2.get_tools() - resources = await mcp2.get_resources() + tools = await mcp2.list_tools() + resources = await mcp2.list_resources() - print(f"Tools ({len(tools)}): {', '.join(tools.keys())}") - print(f"Resources ({len(resources)}): {', '.join(resources.keys())}") + print(f"Tools ({len(tools)}): {', '.join(t.name for t in tools)}") + print(f"Resources ({len(resources)}): {', '.join(str(r.uri) for r in resources)}") print("\n=== Example 3: Pattern + Tags combination ===") @@ -107,11 +107,11 @@ async def main(): ], ) - tools = await mcp3.get_tools() - resources = await mcp3.get_resources() + tools = await mcp3.list_tools() + resources = await mcp3.list_resources() - print(f"Tools ({len(tools)}): {', '.join(tools.keys())}") - print(f"Resources ({len(resources)}): {', '.join(resources.keys())}") + print(f"Tools ({len(tools)}): {', '.join(t.name for t in tools)}") + print(f"Resources ({len(resources)}): {', '.join(str(r.uri) for r in resources)}") print("\n=== Example 4: Multiple tag AND condition ===") @@ -130,11 +130,11 @@ async def main(): ], ) - tools = await mcp4.get_tools() - resources = await mcp4.get_resources() + tools = await mcp4.list_tools() + resources = await mcp4.list_resources() - print(f"Tools ({len(tools)}): {', '.join(tools.keys())}") - print(f"Resources ({len(resources)}): {', '.join(resources.keys())}") + print(f"Tools ({len(tools)}): {', '.join(t.name for t in tools)}") + print(f"Resources ({len(resources)}): {', '.join(str(r.uri) for r in resources)}") if __name__ == "__main__": diff --git a/examples/task_elicitation.py b/examples/task_elicitation.py new file mode 100644 index 0000000000..51f7b046ac --- /dev/null +++ b/examples/task_elicitation.py @@ -0,0 +1,82 @@ +""" +Background task elicitation demo. + +A background task (Docket) that pauses mid-execution to ask the user a +question, waits for the answer, then resumes and finishes. 
+ +Works with both in-memory and Redis backends: + + # In-memory (single process, no Redis needed) + FASTMCP_DOCKET_URL=memory:// uv run python examples/task_elicitation.py + + # Redis (distributed, needs a worker running separately) + # Terminal 1: docker compose -f examples/tasks/docker-compose.yml up -d + # Terminal 2: FASTMCP_DOCKET_URL=redis://localhost:24242/0 \ + # uv run fastmcp tasks worker examples/task_elicitation.py + # Terminal 3: FASTMCP_DOCKET_URL=redis://localhost:24242/0 \ + # uv run python examples/task_elicitation.py + +Requires the `docket` extra (included in dev dependencies). +""" + +import asyncio +from dataclasses import dataclass + +from mcp.types import TextContent + +from fastmcp import Context, FastMCP +from fastmcp.client import Client +from fastmcp.server.elicitation import AcceptedElicitation + +mcp = FastMCP("Task Elicitation Demo") + + +@dataclass +class DinnerPrefs: + cuisine: str + vegetarian: bool + + +@mcp.tool(task=True) +async def plan_dinner(ctx: Context) -> str: + """Plan a dinner menu, asking the user what they're in the mood for.""" + + await ctx.report_progress(0, 2, "Asking what you'd like...") + + result = await ctx.elicit( + "What kind of dinner are you in the mood for?", + response_type=DinnerPrefs, + ) + + if not isinstance(result, AcceptedElicitation): + return "Dinner cancelled!" + + prefs = result.data + await ctx.report_progress(1, 2, "Planning your menu...") + await asyncio.sleep(1) + await ctx.report_progress(2, 2, "Done!") + + veg = "vegetarian " if prefs.vegetarian else "" + return f"Tonight's menu: a lovely {veg}{prefs.cuisine} dinner!" 
+ + +async def handle_elicitation(message, response_type, params, context): + """Handle elicitation requests from background tasks.""" + print(f" Server asks: {message}") + print(" Responding with: cuisine=Thai, vegetarian=True") + return DinnerPrefs(cuisine="Thai", vegetarian=True) + + +async def main(): + async with Client(mcp, elicitation_handler=handle_elicitation) as client: + print("Starting background task...") + task = await client.call_tool("plan_dinner", {}, task=True) + print(f" task_id = {task.task_id}\n") + + result = await task.result() + assert isinstance(result.content[0], TextContent) + print(f"\nResult: {result.content[0].text}") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/testing_demo/uv.lock b/examples/testing_demo/uv.lock index 1ad5e38705..8f07579f96 100644 --- a/examples/testing_demo/uv.lock +++ b/examples/testing_demo/uv.lock @@ -304,67 +304,62 @@ wheels = [ [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = 
"https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, - { url = "https://files.pythonhosted.org/packages/d9/cd/1a8633802d766a0fa46f382a77e096d7e209e0817892929655fe0586ae32/cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32", size = 3689163, upload-time = "2025-10-15T23:18:13.821Z" }, - { url = "https://files.pythonhosted.org/packages/4c/59/6b26512964ace6480c3e54681a9859c974172fb141c38df11eadd8416947/cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c", size = 3429474, upload-time = "2025-10-15T23:18:15.477Z" }, - { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, - { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, - { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, - { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index 029b92d6bb..a6705a723b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ dependencies = [ "pydantic[email]>=2.11.7", "pyyaml>=6.0,<7.0", "pyperclip>=1.9.0", - "py-key-value-aio[disk,keyring,memory]>=0.3.0,<0.4.0", + "py-key-value-aio[disk,keyring,memory]>=0.4.0,<0.5.0", "uvicorn>=0.35", 
"websockets>=15.0.1", "jsonschema-path>=0.3.4", @@ -53,13 +53,14 @@ classifiers = [ [project.optional-dependencies] anthropic = ["anthropic>=0.40.0"] apps = ["prefab-ui>=0.1.0"] +azure = ["azure-identity>=1.16.0"] openai = ["openai>=1.102.0"] tasks = ["pydocket>=0.17.2"] [dependency-groups] dev = [ "dirty-equals>=0.9.0", - "fastmcp[anthropic,apps,openai,tasks]", + "fastmcp[anthropic,apps,azure,openai,tasks]", # add optional dependencies for fastmcp dev "fastapi>=0.115.12", "opentelemetry-sdk>=1.20.0", diff --git a/src/fastmcp/client/auth/oauth.py b/src/fastmcp/client/auth/oauth.py index 9fc90b4e89..a4d1e9c772 100644 --- a/src/fastmcp/client/auth/oauth.py +++ b/src/fastmcp/client/auth/oauth.py @@ -154,7 +154,12 @@ def __init__( additional_client_metadata: dict[str, Any] | None = None, callback_port: int | None = None, httpx_client_factory: McpHttpClientFactory | None = None, + # Alternative to dynamic client registration: + # --- Clients host a static JSON document at an HTTPS URL --- client_metadata_url: str | None = None, + # --- OR clients provide full client information --- + client_id: str | None = None, + client_secret: str | None = None, ): """ Initialize OAuth client provider for an MCP server. @@ -173,6 +178,9 @@ def __init__( provided, this URL is used as the client_id instead of performing Dynamic Client Registration. Must be an HTTPS URL with a non-root path (e.g. "https://myapp.example.com/oauth/client.json"). + client_id: Pre-registered OAuth client ID. When provided, skips dynamic + client registration and uses these static credentials instead. 
+ client_secret: OAuth client secret (optional, used with client_id) """ # Store config for deferred binding if mcp_url not yet known self._scopes = scopes @@ -181,6 +189,9 @@ def __init__( self._additional_client_metadata = additional_client_metadata self._callback_port = callback_port self._client_metadata_url = client_metadata_url + self._client_id = client_id + self._client_secret = client_secret + self._static_client_info = None self.httpx_client_factory = httpx_client_factory or httpx.AsyncClient self._bound = False @@ -218,6 +229,23 @@ def _bind(self, mcp_url: str) -> None: **(self._additional_client_metadata or {}), ) + if self._client_id: + # Create the full static client info directly which will avoid DCR. + # Spread client_metadata so redirect_uris, grant_types, response_types, + # scope, etc. are included β€” servers may validate these fields. + metadata = client_metadata.model_dump(exclude_none=True) + # Default token_endpoint_auth_method based on whether a secret is + # provided, unless the caller already set it via additional_client_metadata. 
+ if "token_endpoint_auth_method" not in metadata: + metadata["token_endpoint_auth_method"] = ( + "client_secret_post" if self._client_secret else "none" + ) + self._static_client_info = OAuthClientInformationFull( + client_id=self._client_id, + client_secret=self._client_secret, + **metadata, + ) + token_storage = self._token_storage or MemoryStore() if isinstance(token_storage, MemoryStore): @@ -230,6 +258,7 @@ def _bind(self, mcp_url: str) -> None: stacklevel=2, ) + # Use full URL for token storage to properly separate tokens per MCP endpoint self.token_storage_adapter: TokenStorageAdapter = TokenStorageAdapter( async_key_value=token_storage, server_url=mcp_url ) @@ -249,10 +278,12 @@ def _bind(self, mcp_url: str) -> None: async def _initialize(self) -> None: """Load stored tokens and client info, properly setting token expiry.""" - # Call parent's _initialize to load tokens and client info await super()._initialize() - # If tokens were loaded and have expires_in, update the context's token_expiry_time + if self._static_client_info is not None: + self.context.client_info = self._static_client_info + await self.token_storage_adapter.set_client_info(self._static_client_info) + if self.context.current_tokens and self.context.current_tokens.expires_in: self.context.update_token_expiry(self.context.current_tokens) @@ -342,6 +373,15 @@ async def async_auth_flow( break except ClientNotFoundError: + # Static credentials are fixed β€” retrying won't help. Surface the + # error so the user can correct their client_id / client_secret. + if self._static_client_info is not None: + raise ClientNotFoundError( + "OAuth server rejected the static client credentials. " + "Verify that the client_id (and client_secret, if provided) " + "are correct and that the client is registered with the server." + ) from None + logger.debug( "OAuth client not found on server, clearing cache and retrying..." 
) diff --git a/src/fastmcp/client/mixins/prompts.py b/src/fastmcp/client/mixins/prompts.py index e7c8df80de..e8907c1946 100644 --- a/src/fastmcp/client/mixins/prompts.py +++ b/src/fastmcp/client/mixins/prompts.py @@ -70,12 +70,20 @@ async def list_prompts(self: Client) -> list[mcp.types.Prompt]: """ all_prompts: list[mcp.types.Prompt] = [] cursor: str | None = None + seen_cursors: set[str] = set() while True: result = await self.list_prompts_mcp(cursor=cursor) all_prompts.extend(result.prompts) - if result.nextCursor is None: + if not result.nextCursor: break + if result.nextCursor in seen_cursors: + logger.warning( + f"[{self.name}] Server returned duplicate pagination cursor" + f" {result.nextCursor!r} for list_prompts; stopping pagination" + ) + break + seen_cursors.add(result.nextCursor) cursor = result.nextCursor return all_prompts diff --git a/src/fastmcp/client/mixins/resources.py b/src/fastmcp/client/mixins/resources.py index 86de910d9b..f49f861f80 100644 --- a/src/fastmcp/client/mixins/resources.py +++ b/src/fastmcp/client/mixins/resources.py @@ -69,12 +69,20 @@ async def list_resources(self: Client) -> list[mcp.types.Resource]: """ all_resources: list[mcp.types.Resource] = [] cursor: str | None = None + seen_cursors: set[str] = set() while True: result = await self.list_resources_mcp(cursor=cursor) all_resources.extend(result.resources) - if result.nextCursor is None: + if not result.nextCursor: break + if result.nextCursor in seen_cursors: + logger.warning( + f"[{self.name}] Server returned duplicate pagination cursor" + f" {result.nextCursor!r} for list_resources; stopping pagination" + ) + break + seen_cursors.add(result.nextCursor) cursor = result.nextCursor return all_resources @@ -119,12 +127,21 @@ async def list_resource_templates(self: Client) -> list[mcp.types.ResourceTempla """ all_templates: list[mcp.types.ResourceTemplate] = [] cursor: str | None = None + seen_cursors: set[str] = set() while True: result = await 
self.list_resource_templates_mcp(cursor=cursor) all_templates.extend(result.resourceTemplates) - if result.nextCursor is None: + if not result.nextCursor: + break + if result.nextCursor in seen_cursors: + logger.warning( + f"[{self.name}] Server returned duplicate pagination cursor" + f" {result.nextCursor!r} for list_resource_templates;" + " stopping pagination" + ) break + seen_cursors.add(result.nextCursor) cursor = result.nextCursor return all_templates diff --git a/src/fastmcp/client/mixins/tools.py b/src/fastmcp/client/mixins/tools.py index 557af20be8..f702e4938f 100644 --- a/src/fastmcp/client/mixins/tools.py +++ b/src/fastmcp/client/mixins/tools.py @@ -73,12 +73,20 @@ async def list_tools(self: Client) -> list[mcp.types.Tool]: """ all_tools: list[mcp.types.Tool] = [] cursor: str | None = None + seen_cursors: set[str] = set() while True: result = await self.list_tools_mcp(cursor=cursor) all_tools.extend(result.tools) - if result.nextCursor is None: + if not result.nextCursor: break + if result.nextCursor in seen_cursors: + logger.warning( + f"[{self.name}] Server returned duplicate pagination cursor" + f" {result.nextCursor!r} for list_tools; stopping pagination" + ) + break + seen_cursors.add(result.nextCursor) cursor = result.nextCursor return all_tools diff --git a/src/fastmcp/dependencies.py b/src/fastmcp/dependencies.py index 87d5367a92..b23222e9dc 100644 --- a/src/fastmcp/dependencies.py +++ b/src/fastmcp/dependencies.py @@ -26,6 +26,7 @@ CurrentWorker, Progress, ProgressLike, + TokenClaim, ) __all__ = [ @@ -39,4 +40,5 @@ "Depends", "Progress", "ProgressLike", + "TokenClaim", ] diff --git a/src/fastmcp/experimental/utilities/openapi/__init__.py b/src/fastmcp/experimental/utilities/openapi/__init__.py index 51c947bf17..0c8dd15e51 100644 --- a/src/fastmcp/experimental/utilities/openapi/__init__.py +++ b/src/fastmcp/experimental/utilities/openapi/__init__.py @@ -10,7 +10,6 @@ RequestBodyInfo, ResponseInfo, extract_output_schema_from_responses, - 
format_simple_description, parse_openapi_to_http_routes, _combine_schemas, ) @@ -32,6 +31,5 @@ "ResponseInfo", "_combine_schemas", "extract_output_schema_from_responses", - "format_simple_description", "parse_openapi_to_http_routes", ] diff --git a/src/fastmcp/mcp_config.py b/src/fastmcp/mcp_config.py index f65132edbc..d4dbf2df52 100644 --- a/src/fastmcp/mcp_config.py +++ b/src/fastmcp/mcp_config.py @@ -76,7 +76,7 @@ def infer_transport_type_from_url( class _TransformingMCPServerMixin(FastMCPBaseModel): """A mixin that enables wrapping an MCP Server with tool transforms.""" - tools: dict[str, ToolTransformConfig] = Field(...) + tools: dict[str, ToolTransformConfig] = Field(default_factory=dict) """The multi-tool transform to apply to the tools.""" include_tags: set[str] | None = Field( @@ -89,6 +89,27 @@ class _TransformingMCPServerMixin(FastMCPBaseModel): description="The tags to exclude in the proxy.", ) + @model_validator(mode="before") + @classmethod + def _require_at_least_one_transform_field( + cls, values: dict[str, Any] + ) -> dict[str, Any]: + """Reject if none of the transforming fields are set. + + This ensures that plain server configs (without tools, include_tags, + or exclude_tags) fall through to the base server types during union + validation, avoiding unnecessary proxy wrapping. 
+ """ + if isinstance(values, dict): + has_tools = bool(values.get("tools")) + has_include = values.get("include_tags") is not None + has_exclude = values.get("exclude_tags") is not None + if not (has_tools or has_include or has_exclude): + raise ValueError( + "At least one of 'tools', 'include_tags', or 'exclude_tags' is required" + ) + return values + def _to_server_and_underlying_transport( self, server_name: str | None = None, @@ -109,10 +130,13 @@ def _to_server_and_underlying_transport( wrapped_mcp_server = create_proxy( client, name=server_name, - include_tags=self.include_tags, - exclude_tags=self.exclude_tags, ) + if self.include_tags is not None: + wrapped_mcp_server.enable(tags=self.include_tags, only=True) + if self.exclude_tags is not None: + wrapped_mcp_server.disable(tags=self.exclude_tags) + # Apply tool transforms if configured if self.tools: from fastmcp.server.transforms import ToolTransform diff --git a/src/fastmcp/prompts/function_prompt.py b/src/fastmcp/prompts/function_prompt.py index 916838ede3..a58700a019 100644 --- a/src/fastmcp/prompts/function_prompt.py +++ b/src/fastmcp/prompts/function_prompt.py @@ -25,12 +25,12 @@ from fastmcp.decorators import resolve_task_config from fastmcp.exceptions import PromptError from fastmcp.prompts.prompt import Prompt, PromptArgument, PromptResult +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.dependencies import ( transform_context_annotations, without_injected_parameters, ) from fastmcp.server.tasks.config import TaskConfig -from fastmcp.tools.tool import AuthCheckCallable from fastmcp.utilities.async_utils import call_sync_fn_in_threadpool from fastmcp.utilities.json_schema import compress_schema from fastmcp.utilities.logging import get_logger @@ -67,7 +67,7 @@ class PromptMeta: tags: set[str] | None = None meta: dict[str, Any] | None = None task: bool | TaskConfig | None = None - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None + auth: AuthCheck | 
list[AuthCheck] | None = None enabled: bool = True @@ -91,7 +91,7 @@ def from_function( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionPrompt: """Create a Prompt from a function. @@ -377,7 +377,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[F], F]: ... @overload def prompt( @@ -391,7 +391,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[F], F]: ... @@ -406,7 +406,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Any: """Standalone decorator to mark a function as an MCP prompt. 
diff --git a/src/fastmcp/prompts/prompt.py b/src/fastmcp/prompts/prompt.py index 8cf2d3265b..07540629e8 100644 --- a/src/fastmcp/prompts/prompt.py +++ b/src/fastmcp/prompts/prompt.py @@ -27,8 +27,8 @@ from pydantic import Field from pydantic.json_schema import SkipJsonSchema +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig, TaskMeta -from fastmcp.tools.tool import AuthCheckCallable from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.logging import get_logger from fastmcp.utilities.types import ( @@ -195,7 +195,7 @@ class Prompt(FastMCPComponent): arguments: list[PromptArgument] | None = Field( default=None, description="Arguments that can be passed to the prompt" ) - auth: SkipJsonSchema[AuthCheckCallable | list[AuthCheckCallable] | None] = Field( + auth: SkipJsonSchema[AuthCheck | list[AuthCheck] | None] = Field( default=None, description="Authorization checks for this prompt", exclude=True ) @@ -237,7 +237,7 @@ def from_function( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionPrompt: """Create a Prompt from a function. 
diff --git a/src/fastmcp/resources/function_resource.py b/src/fastmcp/resources/function_resource.py index 76c6f974c4..bf66735529 100644 --- a/src/fastmcp/resources/function_resource.py +++ b/src/fastmcp/resources/function_resource.py @@ -16,12 +16,12 @@ from fastmcp.decorators import resolve_task_config from fastmcp.resources.resource import Resource, ResourceResult from fastmcp.server.apps import resolve_ui_mime_type +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.dependencies import ( transform_context_annotations, without_injected_parameters, ) from fastmcp.server.tasks.config import TaskConfig -from fastmcp.tools.tool import AuthCheckCallable from fastmcp.utilities.async_utils import call_sync_fn_in_threadpool if TYPE_CHECKING: @@ -57,7 +57,7 @@ class ResourceMeta: annotations: Annotations | None = None meta: dict[str, Any] | None = None task: bool | TaskConfig | None = None - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None + auth: AuthCheck | list[AuthCheck] | None = None enabled: bool = True @@ -94,7 +94,7 @@ def from_function( annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionResource: """Create a FunctionResource from a function. @@ -246,7 +246,7 @@ def resource( annotations: Annotations | dict[str, Any] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[F], F]: """Standalone decorator to mark a function as an MCP resource. 
diff --git a/src/fastmcp/resources/resource.py b/src/fastmcp/resources/resource.py index 1f36fd5e28..26ed535dec 100644 --- a/src/fastmcp/resources/resource.py +++ b/src/fastmcp/resources/resource.py @@ -29,8 +29,8 @@ from pydantic.json_schema import SkipJsonSchema from typing_extensions import Self +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig, TaskMeta -from fastmcp.tools.tool import AuthCheckCallable from fastmcp.utilities.components import FastMCPComponent @@ -227,7 +227,7 @@ class Resource(FastMCPComponent): Field(description="Optional annotations about the resource's behavior"), ] = None auth: Annotated[ - SkipJsonSchema[AuthCheckCallable | list[AuthCheckCallable] | None], + SkipJsonSchema[AuthCheck | list[AuthCheck] | None], Field(description="Authorization checks for this resource", exclude=True), ] = None @@ -247,7 +247,7 @@ def from_function( annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionResource: from fastmcp.resources.function_resource import ( FunctionResource, diff --git a/src/fastmcp/resources/template.py b/src/fastmcp/resources/template.py index 8ce6500737..c2fb1b6222 100644 --- a/src/fastmcp/resources/template.py +++ b/src/fastmcp/resources/template.py @@ -24,12 +24,12 @@ from fastmcp.resources.resource import Resource, ResourceResult from fastmcp.server.apps import resolve_ui_mime_type +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.dependencies import ( transform_context_annotations, without_injected_parameters, ) from fastmcp.server.tasks.config import TaskConfig, TaskMeta -from fastmcp.tools.tool import AuthCheckCallable from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.json_schema import compress_schema from 
fastmcp.utilities.types import get_cached_typeadapter @@ -117,7 +117,7 @@ class ResourceTemplate(FastMCPComponent): annotations: Annotations | None = Field( default=None, description="Optional annotations about the resource's behavior" ) - auth: SkipJsonSchema[AuthCheckCallable | list[AuthCheckCallable] | None] = Field( + auth: SkipJsonSchema[AuthCheck | list[AuthCheck] | None] = Field( default=None, description="Authorization checks for this resource template", exclude=True, @@ -140,7 +140,7 @@ def from_function( annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionResourceTemplate: return FunctionResourceTemplate.from_function( fn=fn, @@ -471,7 +471,7 @@ def from_function( annotations: Annotations | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionResourceTemplate: """Create a template from a function.""" diff --git a/src/fastmcp/server/auth/authorization.py b/src/fastmcp/server/auth/authorization.py index dd0e16cc17..8455b81f5b 100644 --- a/src/fastmcp/server/auth/authorization.py +++ b/src/fastmcp/server/auth/authorization.py @@ -28,8 +28,9 @@ def admin_prompt(): ... 
from __future__ import annotations +import inspect import logging -from collections.abc import Callable +from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import TYPE_CHECKING, cast @@ -70,8 +71,8 @@ def tool(self) -> Tool | None: return self.component if isinstance(self.component, Tool) else None -# Type alias for auth check functions -AuthCheck = Callable[[AuthContext], bool] +# Type alias for auth check functions (sync or async) +AuthCheck = Callable[[AuthContext], bool] | Callable[[AuthContext], Awaitable[bool]] def require_scopes(*scopes: str) -> AuthCheck: @@ -130,13 +131,14 @@ def check(ctx: AuthContext) -> bool: return check -def run_auth_checks( +async def run_auth_checks( checks: AuthCheck | list[AuthCheck], ctx: AuthContext, ) -> bool: """Run auth checks with AND logic. - All checks must pass for authorization to succeed. + All checks must pass for authorization to succeed. Checks can be + synchronous or asynchronous functions. Auth checks can: - Return True to allow access @@ -146,6 +148,7 @@ def run_auth_checks( Args: checks: A single check function or list of check functions. + Each check can be sync (returns bool) or async (returns Awaitable[bool]). ctx: The auth context to pass to each check. 
Returns: @@ -159,7 +162,10 @@ def run_auth_checks( for check in check_list: try: - if not check(ctx): + result = check(ctx) + if inspect.isawaitable(result): + result = await result + if not result: return False except AuthorizationError: # Let AuthorizationError propagate with its custom message diff --git a/src/fastmcp/server/auth/providers/azure.py b/src/fastmcp/server/auth/providers/azure.py index cc0d2544e9..b5974d8874 100644 --- a/src/fastmcp/server/auth/providers/azure.py +++ b/src/fastmcp/server/auth/providers/azure.py @@ -6,7 +6,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast from key_value.aio.protocols import AsyncKeyValue @@ -16,6 +16,7 @@ from fastmcp.utilities.logging import get_logger if TYPE_CHECKING: + from azure.identity.aio import OnBehalfOfCredential from mcp.server.auth.provider import AuthorizationParams from mcp.shared.auth import OAuthClientInformationFull @@ -161,6 +162,10 @@ def __init__( if "offline_access" not in parsed_additional_scopes: parsed_additional_scopes = [*parsed_additional_scopes, "offline_access"] + # Store Azure-specific config for OBO credential creation + self._tenant_id = tenant_id + self._base_authority = base_authority + # Apply defaults self.identifier_uri = identifier_uri or f"api://{client_id}" self.additional_authorize_scopes: list[str] = parsed_additional_scopes @@ -453,6 +458,33 @@ async def _extract_upstream_claims( logger.debug("Failed to extract Azure claims: %s", e) return None + def create_obo_credential(self, user_assertion: str) -> OnBehalfOfCredential: + """Create an OnBehalfOfCredential for OBO token exchange. + + Uses the AzureProvider's configuration (client_id, client_secret, + tenant_id, authority) to create a credential that can exchange the + user's token for downstream API tokens. + + Args: + user_assertion: The user's access token to exchange via OBO. 
+ + Returns: + A configured OnBehalfOfCredential ready for get_token() calls. + + Raises: + ImportError: If azure-identity is not installed (requires fastmcp[azure]). + """ + _require_azure_identity("OBO token exchange") + from azure.identity.aio import OnBehalfOfCredential + + return OnBehalfOfCredential( + tenant_id=self._tenant_id, + client_id=self._upstream_client_id, + client_secret=self._upstream_client_secret.get_secret_value(), + user_assertion=user_assertion, + authority=f"https://{self._base_authority}", + ) + class AzureJWTVerifier(JWTVerifier): """JWT verifier pre-configured for Azure AD / Microsoft Entra ID. @@ -552,3 +584,117 @@ def scopes_supported(self) -> list[str]: else: prefixed.append(f"{self._identifier_uri}/{scope}") return prefixed + + +# --- Dependency injection support --- +# These require fastmcp[azure] extra for azure-identity + +# Check if DI engine is available +try: + from docket.dependencies import Dependency +except ImportError: + from fastmcp._vendor.docket_di import Dependency + + +def _require_azure_identity(feature: str) -> None: + """Raise ImportError with install instructions if azure-identity is not available.""" + try: + import azure.identity # noqa: F401 + except ImportError as e: + raise ImportError( + f"{feature} requires the `azure` extra. " + "Install with: pip install 'fastmcp[azure]'" + ) from e + + +class _EntraOBOToken(Dependency): # type: ignore[misc] + """Dependency that performs OBO token exchange for Microsoft Entra. + + Uses azure.identity's OnBehalfOfCredential for async-native OBO, + with automatic token caching and refresh. 
+ """ + + def __init__(self, scopes: list[str]): + self.scopes = scopes + self._credential: OnBehalfOfCredential | None = None + + async def __aenter__(self) -> str: + _require_azure_identity("EntraOBOToken") + + from fastmcp.server.dependencies import get_access_token, get_server + + access_token = get_access_token() + if access_token is None: + raise RuntimeError( + "No access token available. Cannot perform OBO exchange." + ) + + server = get_server() + if not isinstance(server.auth, AzureProvider): + raise RuntimeError( + "EntraOBOToken requires an AzureProvider as the auth provider. " + f"Current provider: {type(server.auth).__name__}" + ) + + self._credential = server.auth.create_obo_credential( + user_assertion=access_token.token, + ) + + try: + result = await self._credential.get_token(*self.scopes) + except BaseException: + await self._credential.close() + self._credential = None + raise + + return result.token + + async def __aexit__(self, *args: object) -> None: + if self._credential is not None: + await self._credential.close() + self._credential = None + + +def EntraOBOToken(scopes: list[str]) -> str: + """Exchange the user's Entra token for a downstream API token via OBO. + + This dependency performs a Microsoft Entra On-Behalf-Of (OBO) token exchange, + allowing your MCP server to call downstream APIs (like Microsoft Graph) on + behalf of the authenticated user. + + Args: + scopes: The scopes to request for the downstream API. For Microsoft Graph, + use scopes like ["https://graph.microsoft.com/Mail.Read"] or + ["https://graph.microsoft.com/.default"]. 
+ + Returns: + A dependency that resolves to the downstream API access token string + + Raises: + ImportError: If fastmcp[azure] is not installed + RuntimeError: If no access token is available, provider is not Azure, + or OBO exchange fails + + Example: + ```python + from fastmcp.server.auth.providers.azure import EntraOBOToken + import httpx + + @mcp.tool() + async def get_my_emails( + graph_token: str = EntraOBOToken(["https://graph.microsoft.com/Mail.Read"]) + ): + async with httpx.AsyncClient() as client: + resp = await client.get( + "https://graph.microsoft.com/v1.0/me/messages", + headers={"Authorization": f"Bearer {graph_token}"} + ) + return resp.json() + ``` + + Note: + For OBO to work, ensure the scopes are included in the AzureProvider's + `additional_authorize_scopes` parameter, and that admin consent has been + granted for those scopes in your Entra app registration. + """ + return cast(str, _EntraOBOToken(scopes)) diff --git a/src/fastmcp/server/context.py b/src/fastmcp/server/context.py index a9245fa2b1..c17e13f03c 100644 --- a/src/fastmcp/server/context.py +++ b/src/fastmcp/server/context.py @@ -161,6 +161,9 @@ async def my_tool(x: int, ctx: Context) -> str: await ctx.set_state("key", "value") value = await ctx.get_state("key") + # Store non-serializable values for the current request only + await ctx.set_state("client", http_client, serializable=False) + return str(x) ``` @@ -194,6 +197,8 @@ def __init__( self._tokens: list[Token] = [] # Background task support (SEP-1686) self._task_id: str | None = task_id + # Request-scoped state for non-serializable values (serializable=False) + self._request_state: dict[str, Any] = {} @property def is_background_task(self) -> bool: @@ -318,6 +323,10 @@ def lifespan_context(self) -> dict[str, Any]: Returns an empty dict if no lifespan was configured or if the MCP session is not yet established. 
+ In background tasks (Docket workers), where request_context is not + available, falls back to reading from the FastMCP server's lifespan + result directly. + Example: ```python @server.tool @@ -330,6 +339,11 @@ def my_tool(ctx: Context) -> str: """ rc = self.request_context if rc is None: + # In background tasks, request_context is not available. + # Fall back to the server's lifespan result directly (#3095). + result = self.fastmcp._lifespan_result + if result is not None: + return result return {} return rc.lifespan_context @@ -338,9 +352,13 @@ async def report_progress( ) -> None: """Report progress for the current operation. + Works in both foreground (MCP progress notifications) and background + (Docket task execution) contexts. + Args: progress: Current progress value e.g. 24 total: Optional total value e.g. 100 + message: Optional status message describing current progress """ progress_token = ( @@ -349,16 +367,48 @@ async def report_progress( else None ) - if progress_token is None: + # Foreground: Send MCP progress notification if we have a token + if progress_token is not None: + await self.session.send_progress_notification( + progress_token=progress_token, + progress=progress, + total=total, + message=message, + related_request_id=self.request_id, + ) return - await self.session.send_progress_notification( - progress_token=progress_token, - progress=progress, - total=total, - message=message, - related_request_id=self.request_id, - ) + # Background: Update Docket execution progress (stored in Redis) + # This makes progress visible via tasks/get and notifications/tasks/status + from fastmcp.server.dependencies import is_docket_available + + if not is_docket_available(): + return + + try: + from docket.dependencies import Dependency + + # Get current execution from worker context + execution = Dependency.execution.get() + + # Update progress in Redis using Docket's progress API. 
+ # Docket only exposes increment() (relative), so we compute + # the delta from the last reported value stored on this execution. + if total is not None: + await execution.progress.set_total(int(total)) + + current = int(progress) + last: int = getattr(execution, "_fastmcp_last_progress", 0) + delta = current - last + if delta > 0: + await execution.progress.increment(delta) + execution._fastmcp_last_progress = current # type: ignore[attr-defined] + + if message is not None: + await execution.progress.set_message(message) + except LookupError: + # Not running in Docket worker context - no progress tracking available + pass async def _paginate_list( self, @@ -378,12 +428,16 @@ async def _paginate_list( """ all_items: list[Any] = [] cursor: str | None = None + seen_cursors: set[str] = set() while True: request = request_factory(cursor) result = await call_method(request) all_items.extend(extract_items(result)) - if result.nextCursor is None: + if not result.nextCursor: + break + if result.nextCursor in seen_cursors: break + seen_cursors.add(result.nextCursor) cursor = result.nextCursor return all_items @@ -754,6 +808,7 @@ async def sample_step( tool_choice: ToolChoiceOption | str | None = None, execute_tools: bool = True, mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SampleStep: """ Make a single LLM sampling call. @@ -777,6 +832,12 @@ async def sample_step( mask_error_details: If True, mask detailed error messages from tool execution. When None (default), uses the global settings value. Tools can raise ToolError to bypass masking. + tool_concurrency: Controls parallel execution of tools: + - None (default): Sequential execution (one at a time) + - 0: Unlimited parallel execution + - N > 0: Execute at most N tools concurrently + If any tool has sequential=True, all tools execute sequentially + regardless of this setting. 
Returns: SampleStep containing: @@ -810,6 +871,7 @@ async def sample_step( tool_choice=tool_choice, auto_execute_tools=execute_tools, mask_error_details=mask_error_details, + tool_concurrency=tool_concurrency, ) @overload @@ -824,6 +886,7 @@ async def sample( tools: Sequence[SamplingTool | Callable[..., Any]] | None = None, result_type: type[ResultT], mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SamplingResult[ResultT]: """Overload: With result_type, returns SamplingResult[ResultT].""" @@ -839,6 +902,7 @@ async def sample( tools: Sequence[SamplingTool | Callable[..., Any]] | None = None, result_type: None = None, mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SamplingResult[str]: """Overload: Without result_type, returns SamplingResult[str].""" @@ -853,6 +917,7 @@ async def sample( tools: Sequence[SamplingTool | Callable[..., Any]] | None = None, result_type: type[ResultT] | None = None, mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SamplingResult[ResultT] | SamplingResult[str]: """ Send a sampling request to the client and await the response. @@ -883,6 +948,12 @@ async def sample( mask_error_details: If True, mask detailed error messages from tool execution. When None (default), uses the global settings value. Tools can raise ToolError to bypass masking. + tool_concurrency: Controls parallel execution of tools: + - None (default): Sequential execution (one at a time) + - 0: Unlimited parallel execution + - N > 0: Execute at most N tools concurrently + If any tool has sequential=True, all tools execute sequentially + regardless of this setting. 
Returns: SamplingResult[T] containing: @@ -906,6 +977,7 @@ async def sample( tools=tools, result_type=result_type, mask_error_details=mask_error_details, + tool_concurrency=tool_concurrency, ) @overload @@ -1079,7 +1151,7 @@ async def _elicit_for_task( return await elicit_for_task( task_id=self._task_id, # type: ignore[arg-type] - session=self.session, + session=self._session, message=message, schema=schema, fastmcp=self.fastmcp, @@ -1089,32 +1161,69 @@ def _make_state_key(self, key: str) -> str: """Create session-prefixed key for state storage.""" return f"{self.session_id}:{key}" - async def set_state(self, key: str, value: Any) -> None: - """Set a value in the session-scoped state store. + async def set_state( + self, key: str, value: Any, *, serializable: bool = True + ) -> None: + """Set a value in the state store. + + By default, values are stored in the session-scoped state store and + persist across requests within the same MCP session. Values must be + JSON-serializable (dicts, lists, strings, numbers, etc.). + + For non-serializable values (e.g., HTTP clients, database connections), + pass ``serializable=False``. These values are stored in a request-scoped + dict and only live for the current MCP request (tool call, resource + read, or prompt render). They will not be available in subsequent + requests. - Values persist across requests within the same MCP session. The key is automatically prefixed with the session identifier. - State expires after 1 day to prevent unbounded memory growth. 
""" prefixed_key = self._make_state_key(key) - await self.fastmcp._state_store.put( - key=prefixed_key, - value=StateValue(value=value), - ttl=self._STATE_TTL_SECONDS, - ) + if not serializable: + self._request_state[prefixed_key] = value + return + # Clear any request-scoped shadow so the session value is visible + self._request_state.pop(prefixed_key, None) + try: + await self.fastmcp._state_store.put( + key=prefixed_key, + value=StateValue(value=value), + ttl=self._STATE_TTL_SECONDS, + ) + except Exception as e: + # Catch serialization errors from Pydantic (ValueError) or + # the key_value library (SerializationError). Both contain + # "serialize" in the message. Other exceptions propagate as-is. + if "serialize" in str(e).lower(): + raise TypeError( + f"Value for state key {key!r} is not serializable. " + f"Use set_state({key!r}, value, serializable=False) to store " + f"non-serializable values. Note: non-serializable state is " + f"request-scoped and will not persist across requests." + ) from e + raise async def get_state(self, key: str) -> Any: - """Get a value from the session-scoped state store. + """Get a value from the state store. + + Checks request-scoped state first (set with ``serializable=False``), + then falls back to the session-scoped state store. Returns None if the key is not found. """ prefixed_key = self._make_state_key(key) + if prefixed_key in self._request_state: + return self._request_state[prefixed_key] result = await self.fastmcp._state_store.get(key=prefixed_key) return result.value if result is not None else None async def delete_state(self, key: str) -> None: - """Delete a value from the session-scoped state store.""" + """Delete a value from the state store. + + Removes from both request-scoped and session-scoped stores. 
+ """ prefixed_key = self._make_state_key(key) + self._request_state.pop(prefixed_key, None) await self.fastmcp._state_store.delete(key=prefixed_key) # ------------------------------------------------------------------------- diff --git a/src/fastmcp/server/dependencies.py b/src/fastmcp/server/dependencies.py index ffef2561bb..acd7fea194 100644 --- a/src/fastmcp/server/dependencies.py +++ b/src/fastmcp/server/dependencies.py @@ -9,11 +9,13 @@ import contextlib import inspect +import logging import weakref from collections.abc import AsyncGenerator, Callable from contextlib import AsyncExitStack, asynccontextmanager -from contextvars import ContextVar +from contextvars import ContextVar, Token from dataclasses import dataclass +from datetime import datetime, timezone from functools import lru_cache from typing import TYPE_CHECKING, Any, Protocol, cast, get_type_hints, runtime_checkable @@ -33,6 +35,8 @@ from fastmcp.utilities.async_utils import call_sync_fn_in_threadpool from fastmcp.utilities.types import find_kwarg_by_type, is_class_member_of_type +_logger = logging.getLogger(__name__) + if TYPE_CHECKING: from docket import Docket from docket.worker import Worker @@ -53,6 +57,7 @@ "CurrentWorker", "Progress", "TaskContextInfo", + "TokenClaim", "get_access_token", "get_context", "get_http_headers", @@ -165,6 +170,9 @@ def get_task_session(session_id: str) -> ServerSession | None: ) _current_docket: ContextVar[Docket | None] = ContextVar("docket", default=None) _current_worker: ContextVar[Worker | None] = ContextVar("worker", default=None) +_task_access_token: ContextVar[AccessToken | None] = ContextVar( + "task_access_token", default=None +) # --- Docket availability check --- @@ -478,7 +486,8 @@ def get_access_token() -> AccessToken | None: This function first tries to get the token from the current HTTP request's scope, which is more reliable for long-lived connections where the SDK's auth_context_var may become stale after token refresh. 
Falls back to the SDK's context var if no - request is available. + request is available. In background tasks (Docket workers), falls back to the + token snapshot stored in Redis at task submission time. Returns: The access token if an authenticated user is available, None otherwise. @@ -501,6 +510,19 @@ def get_access_token() -> AccessToken | None: if access_token is None: access_token = _sdk_get_access_token() + # Fall back to background task snapshot (#3095) + # In Docket workers, neither HTTP request nor SDK context var are available. + # The token was snapshotted in Redis at submit_to_docket() time and restored + # into this ContextVar by _CurrentContext.__aenter__(). + if access_token is None: + task_token = _task_access_token.get() + if task_token is not None: + # Check expiration: if expires_at is set and past, treat as expired + if task_token.expires_at is not None: + if task_token.expires_at < int(datetime.now(timezone.utc).timestamp()): + return None + return task_token + if access_token is None or isinstance(access_token, AccessToken): return access_token @@ -718,14 +740,49 @@ async def resolve_dependencies( # so that get_dependency_parameters can detect them. +async def _restore_task_access_token( + session_id: str, task_id: str +) -> Token[AccessToken | None] | None: + """Restore the access token snapshot from Redis into a ContextVar. + + Called when setting up context in a Docket worker. The token was stored at + submit_to_docket() time. The token is restored regardless of expiration; + get_access_token() checks expiry when reading from the ContextVar. + + Returns: + The ContextVar token for resetting, or None if nothing was restored. 
+ """ + docket = _current_docket.get() + if docket is None: + return None + + token_key = docket.key(f"fastmcp:task:{session_id}:{task_id}:access_token") + try: + async with docket.redis() as redis: + token_data = await redis.get(token_key) + if token_data is not None: + restored = AccessToken.model_validate_json(token_data) + return _task_access_token.set(restored) + except Exception: + _logger.warning( + "Failed to restore access token for task %s:%s", + session_id, + task_id, + exc_info=True, + ) + return None + + class _CurrentContext(Dependency): # type: ignore[misc] """Async context manager for Context dependency. In foreground (request) mode: returns the active context from _current_context. - In background (Docket worker) mode: creates a task-aware Context with task_id. + In background (Docket worker) mode: creates a task-aware Context with task_id + and restores the access token snapshot from Redis. """ _context: Context | None = None + _access_token_cv_token: Token[AccessToken | None] | None = None async def __aenter__(self) -> Context: from fastmcp.server.context import Context, _current_context @@ -750,6 +807,12 @@ async def __aenter__(self) -> Context: ) # Enter the context to set up ContextVars await self._context.__aenter__() + + # Restore access token snapshot from Redis (#3095) + self._access_token_cv_token = await _restore_task_access_token( + task_info.session_id, task_info.task_id + ) + return self._context # Neither foreground nor background context available @@ -761,6 +824,10 @@ async def __aenter__(self) -> Context: ) async def __aexit__(self, *args: object) -> None: + # Clean up access token ContextVar + if self._access_token_cv_token is not None: + _task_access_token.reset(self._access_token_cv_token) + self._access_token_cv_token = None # Clean up if we created a context for background task if self._context is not None: await self._context.__aexit__(*args) @@ -991,47 +1058,6 @@ async def get_auth_type(headers: dict = CurrentHeaders()) -> 
str: return cast(dict[str, str], _CurrentHeaders()) -class _CurrentAccessToken(Dependency): # type: ignore[misc] - """Async context manager for AccessToken dependency.""" - - async def __aenter__(self) -> AccessToken: - token = get_access_token() - if token is None: - raise RuntimeError( - "No access token found. Ensure authentication is configured " - "and the request is authenticated." - ) - return token - - async def __aexit__(self, *args: object) -> None: - pass - - -def CurrentAccessToken() -> AccessToken: - """Get the current access token for the authenticated user. - - This dependency provides access to the AccessToken for the current - authenticated request. Raises an error if no authentication is present. - - Returns: - A dependency that resolves to the active AccessToken - - Raises: - RuntimeError: If no authenticated user (use get_access_token() for optional) - - Example: - ```python - from fastmcp.server.dependencies import CurrentAccessToken - from fastmcp.server.auth import AccessToken - - @mcp.tool() - async def get_user_id(token: AccessToken = CurrentAccessToken()) -> str: - return token.claims.get("sub", "unknown") - ``` - """ - return cast(AccessToken, _CurrentAccessToken()) - - # --- Progress dependency --- @@ -1162,3 +1188,122 @@ async def __aenter__(self) -> ProgressLike: async def __aexit__(self, *args: object) -> None: pass + + +# --- Access Token dependency --- + + +class _CurrentAccessToken(Dependency): # type: ignore[misc] + """Async context manager for AccessToken dependency.""" + + _access_token_cv_token: Token[AccessToken | None] | None = None + + async def __aenter__(self) -> AccessToken: + token = get_access_token() + + # If no token found and we're in a Docket worker, try restoring from + # Redis. This handles the case where ctx: Context is not in the + # function signature, so _CurrentContext never ran the restoration. 
+ if token is None: + task_info = get_task_context() + if task_info is not None: + self._access_token_cv_token = await _restore_task_access_token( + task_info.session_id, task_info.task_id + ) + token = get_access_token() + + if token is None: + raise RuntimeError( + "No access token found. Ensure authentication is configured " + "and the request is authenticated." + ) + return token + + async def __aexit__(self, *args: object) -> None: + if self._access_token_cv_token is not None: + _task_access_token.reset(self._access_token_cv_token) + self._access_token_cv_token = None + + +def CurrentAccessToken() -> AccessToken: + """Get the current access token for the authenticated user. + + This dependency provides access to the AccessToken for the current + authenticated request. Raises an error if no authentication is present. + + Returns: + A dependency that resolves to the active AccessToken + + Raises: + RuntimeError: If no authenticated user (use get_access_token() for optional) + + Example: + ```python + from fastmcp.server.dependencies import CurrentAccessToken + from fastmcp.server.auth import AccessToken + + @mcp.tool() + async def get_user_id(token: AccessToken = CurrentAccessToken()) -> str: + return token.claims.get("sub", "unknown") + ``` + """ + return cast(AccessToken, _CurrentAccessToken()) + + +# --- Token Claim dependency --- + + +class _TokenClaim(Dependency): # type: ignore[misc] + """Dependency that extracts a specific claim from the access token.""" + + def __init__(self, claim_name: str): + self.claim_name = claim_name + + async def __aenter__(self) -> str: + token = get_access_token() + if token is None: + raise RuntimeError( + f"No access token available. Cannot extract claim '{self.claim_name}'." + ) + value = token.claims.get(self.claim_name) + if value is None: + raise RuntimeError( + f"Claim '{self.claim_name}' not found in access token. 
" + f"Available claims: {list(token.claims.keys())}" + ) + return str(value) + + async def __aexit__(self, *args: object) -> None: + pass + + +def TokenClaim(name: str) -> str: + """Get a specific claim from the access token. + + This dependency extracts a single claim value from the current access token. + It's useful for getting user identifiers, roles, or other token claims + without needing the full token object. + + Args: + name: The name of the claim to extract (e.g., "oid", "sub", "email") + + Returns: + A dependency that resolves to the claim value as a string + + Raises: + RuntimeError: If no access token is available or claim is missing + + Example: + ```python + from fastmcp.server.dependencies import TokenClaim + + @mcp.tool() + async def add_expense( + user_id: str = TokenClaim("oid"), # Azure object ID + amount: float, + ): + # user_id is automatically injected from the token + await db.insert({"user_id": user_id, "amount": amount}) + ``` + """ + return cast(str, _TokenClaim(name)) diff --git a/src/fastmcp/server/middleware/authorization.py b/src/fastmcp/server/middleware/authorization.py index 6a50ed656f..abe33447bc 100644 --- a/src/fastmcp/server/middleware/authorization.py +++ b/src/fastmcp/server/middleware/authorization.py @@ -102,7 +102,7 @@ async def on_list_tools( authorized_tools: list[Tool] = [] for tool in tools: ctx = AuthContext(token=token, component=tool) - if run_auth_checks(self.auth, ctx): + if await run_auth_checks(self.auth, ctx): authorized_tools.append(tool) return authorized_tools @@ -143,7 +143,7 @@ async def on_call_tool( # Global auth check token = get_access_token() ctx = AuthContext(token=token, component=tool) - if not run_auth_checks(self.auth, ctx): + if not await run_auth_checks(self.auth, ctx): raise AuthorizationError( f"Authorization failed for tool '{tool_name}': insufficient permissions" ) @@ -169,7 +169,7 @@ async def on_list_resources( authorized_resources: list[Resource] = [] for resource in resources: ctx = 
AuthContext(token=token, component=resource) - if run_auth_checks(self.auth, ctx): + if await run_auth_checks(self.auth, ctx): authorized_resources.append(resource) return authorized_resources @@ -210,7 +210,7 @@ async def on_read_resource( # Global auth check token = get_access_token() ctx = AuthContext(token=token, component=component) - if not run_auth_checks(self.auth, ctx): + if not await run_auth_checks(self.auth, ctx): raise AuthorizationError( f"Authorization failed for resource '{uri}': insufficient permissions" ) @@ -238,7 +238,7 @@ async def on_list_resource_templates( authorized_templates: list[ResourceTemplate] = [] for template in templates: ctx = AuthContext(token=token, component=template) - if run_auth_checks(self.auth, ctx): + if await run_auth_checks(self.auth, ctx): authorized_templates.append(template) return authorized_templates @@ -262,7 +262,7 @@ async def on_list_prompts( authorized_prompts: list[Prompt] = [] for prompt in prompts: ctx = AuthContext(token=token, component=prompt) - if run_auth_checks(self.auth, ctx): + if await run_auth_checks(self.auth, ctx): authorized_prompts.append(prompt) return authorized_prompts @@ -301,7 +301,7 @@ async def on_get_prompt( # Global auth check token = get_access_token() ctx = AuthContext(token=token, component=prompt) - if not run_auth_checks(self.auth, ctx): + if not await run_auth_checks(self.auth, ctx): raise AuthorizationError( f"Authorization failed for prompt '{prompt_name}': insufficient permissions" ) diff --git a/src/fastmcp/server/middleware/caching.py b/src/fastmcp/server/middleware/caching.py index d4ac159295..670c30a44f 100644 --- a/src/fastmcp/server/middleware/caching.py +++ b/src/fastmcp/server/middleware/caching.py @@ -243,43 +243,41 @@ def __init__( call_tool_settings or CallToolSettings() ) - # PydanticAdapter type signature will be fixed to accept generic aliases - # See: https://github.com/strawgate/py-key-value/pull/250 self._list_tools_cache: PydanticAdapter[list[Tool]] = 
PydanticAdapter( key_value=self._stats, - pydantic_model=list[Tool], # type: ignore[arg-type] + pydantic_model=list[Tool], default_collection="tools/list", ) self._list_resources_cache: PydanticAdapter[list[Resource]] = PydanticAdapter( key_value=self._stats, - pydantic_model=list[Resource], # type: ignore[arg-type] + pydantic_model=list[Resource], default_collection="resources/list", ) self._list_prompts_cache: PydanticAdapter[list[Prompt]] = PydanticAdapter( key_value=self._stats, - pydantic_model=list[Prompt], # type: ignore[arg-type] + pydantic_model=list[Prompt], default_collection="prompts/list", ) self._read_resource_cache: PydanticAdapter[CachableResourceResult] = ( PydanticAdapter( key_value=self._stats, - pydantic_model=CachableResourceResult, # type: ignore[arg-type] + pydantic_model=CachableResourceResult, default_collection="resources/read", ) ) self._get_prompt_cache: PydanticAdapter[CachablePromptResult] = PydanticAdapter( key_value=self._stats, - pydantic_model=CachablePromptResult, # type: ignore[arg-type] + pydantic_model=CachablePromptResult, default_collection="prompts/get", ) self._call_tool_cache: PydanticAdapter[CachableToolResult] = PydanticAdapter( key_value=self._stats, - pydantic_model=CachableToolResult, # type: ignore[arg-type] + pydantic_model=CachableToolResult, default_collection="tools/call", ) diff --git a/src/fastmcp/server/middleware/dereference.py b/src/fastmcp/server/middleware/dereference.py new file mode 100644 index 0000000000..89150d655e --- /dev/null +++ b/src/fastmcp/server/middleware/dereference.py @@ -0,0 +1,78 @@ +"""Middleware that dereferences $ref in JSON schemas before sending to clients.""" + +from collections.abc import Sequence +from typing import Any + +import mcp.types as mt +from typing_extensions import override + +from fastmcp.resources.template import ResourceTemplate +from fastmcp.server.middleware.middleware import CallNext, Middleware, MiddlewareContext +from fastmcp.tools.tool import Tool +from 
fastmcp.utilities.json_schema import dereference_refs + + +class DereferenceRefsMiddleware(Middleware): + """Dereferences $ref in component schemas before sending to clients. + + Some MCP clients (e.g., VS Code Copilot) don't handle JSON Schema $ref + properly. This middleware inlines all $ref definitions so schemas are + self-contained. Enabled by default via ``FastMCP(dereference_schemas=True)``. + """ + + @override + async def on_list_tools( + self, + context: MiddlewareContext[mt.ListToolsRequest], + call_next: CallNext[mt.ListToolsRequest, Sequence[Tool]], + ) -> Sequence[Tool]: + tools = await call_next(context) + return [_dereference_tool(tool) for tool in tools] + + @override + async def on_list_resource_templates( + self, + context: MiddlewareContext[mt.ListResourceTemplatesRequest], + call_next: CallNext[ + mt.ListResourceTemplatesRequest, Sequence[ResourceTemplate] + ], + ) -> Sequence[ResourceTemplate]: + templates = await call_next(context) + return [_dereference_resource_template(t) for t in templates] + + +def _dereference_tool(tool: Tool) -> Tool: + """Return a copy of the tool with dereferenced schemas.""" + updates: dict[str, object] = {} + if "$defs" in tool.parameters or _has_ref(tool.parameters): + updates["parameters"] = dereference_refs(tool.parameters) + if tool.output_schema is not None and ( + "$defs" in tool.output_schema or _has_ref(tool.output_schema) + ): + updates["output_schema"] = dereference_refs(tool.output_schema) + if updates: + return tool.model_copy(update=updates) + return tool + + +def _dereference_resource_template(template: ResourceTemplate) -> ResourceTemplate: + """Return a copy of the template with dereferenced schemas.""" + if "$defs" in template.parameters or _has_ref(template.parameters): + return template.model_copy( + update={"parameters": dereference_refs(template.parameters)} + ) + return template + + +def _has_ref(schema: dict[str, Any]) -> bool: + """Check if a schema contains any $ref.""" + if "$ref" in 
schema: + return True + for value in schema.values(): + if isinstance(value, dict) and _has_ref(value): + return True + if isinstance(value, list): + for item in value: + if isinstance(item, dict) and _has_ref(item): + return True + return False diff --git a/src/fastmcp/server/mixins/transport.py b/src/fastmcp/server/mixins/transport.py index 9e797dc1fe..1f069a02df 100644 --- a/src/fastmcp/server/mixins/transport.py +++ b/src/fastmcp/server/mixins/transport.py @@ -231,17 +231,15 @@ async def run_http_async( # Resolve from settings/env var if not explicitly set if stateless_http is None: - stateless_http = self._deprecated_settings.stateless_http + stateless_http = fastmcp.settings.stateless_http # SSE doesn't support stateless mode if stateless_http and transport == "sse": raise ValueError("SSE transport does not support stateless mode") - host = host or self._deprecated_settings.host - port = port or self._deprecated_settings.port - default_log_level_to_use = ( - log_level or self._deprecated_settings.log_level - ).lower() + host = host or fastmcp.settings.host + port = port or fastmcp.settings.port + default_log_level_to_use = (log_level or fastmcp.settings.log_level).lower() app = self.http_app( path=path, @@ -311,31 +309,30 @@ def http_app( if transport in ("streamable-http", "http"): return create_streamable_http_app( server=self, - streamable_http_path=path - or self._deprecated_settings.streamable_http_path, + streamable_http_path=path or fastmcp.settings.streamable_http_path, event_store=event_store, retry_interval=retry_interval, auth=self.auth, json_response=( json_response if json_response is not None - else self._deprecated_settings.json_response + else fastmcp.settings.json_response ), stateless_http=( stateless_http if stateless_http is not None - else self._deprecated_settings.stateless_http + else fastmcp.settings.stateless_http ), - debug=self._deprecated_settings.debug, + debug=fastmcp.settings.debug, middleware=middleware, ) elif transport == 
"sse": return create_sse_app( server=self, - message_path=self._deprecated_settings.message_path, - sse_path=path or self._deprecated_settings.sse_path, + message_path=fastmcp.settings.message_path, + sse_path=path or fastmcp.settings.sse_path, auth=self.auth, - debug=self._deprecated_settings.debug, + debug=fastmcp.settings.debug, middleware=middleware, ) else: diff --git a/src/fastmcp/server/providers/local_provider/decorators/prompts.py b/src/fastmcp/server/providers/local_provider/decorators/prompts.py index a25d8fa522..5e01b7a048 100644 --- a/src/fastmcp/server/providers/local_provider/decorators/prompts.py +++ b/src/fastmcp/server/providers/local_provider/decorators/prompts.py @@ -17,8 +17,8 @@ import fastmcp from fastmcp.prompts.function_prompt import FunctionPrompt from fastmcp.prompts.prompt import Prompt +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig -from fastmcp.tools.tool import AuthCheckCallable if TYPE_CHECKING: from fastmcp.server.providers.local_provider import LocalProvider @@ -82,7 +82,7 @@ def prompt( enabled: bool = True, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionPrompt: ... @overload @@ -99,7 +99,7 @@ def prompt( enabled: bool = True, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], FunctionPrompt]: ... 
def prompt( @@ -115,7 +115,7 @@ def prompt( enabled: bool = True, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> ( Callable[[AnyFunction], FunctionPrompt] | FunctionPrompt diff --git a/src/fastmcp/server/providers/local_provider/decorators/resources.py b/src/fastmcp/server/providers/local_provider/decorators/resources.py index f6985b164b..52314378e7 100644 --- a/src/fastmcp/server/providers/local_provider/decorators/resources.py +++ b/src/fastmcp/server/providers/local_provider/decorators/resources.py @@ -17,8 +17,8 @@ from fastmcp.resources.function_resource import resource as standalone_resource from fastmcp.resources.resource import Resource from fastmcp.resources.template import ResourceTemplate +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig -from fastmcp.tools.tool import AuthCheckCallable if TYPE_CHECKING: from fastmcp.server.providers.local_provider import LocalProvider @@ -117,7 +117,7 @@ def resource( annotations: Annotations | dict[str, Any] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], Resource | ResourceTemplate | AnyFunction]: """Decorator to register a function as a resource. 
diff --git a/src/fastmcp/server/providers/local_provider/decorators/tools.py b/src/fastmcp/server/providers/local_provider/decorators/tools.py index 001d0780c1..3fbd78783d 100644 --- a/src/fastmcp/server/providers/local_provider/decorators/tools.py +++ b/src/fastmcp/server/providers/local_provider/decorators/tools.py @@ -16,14 +16,15 @@ from mcp.types import AnyFunction, ToolAnnotations import fastmcp +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig from fastmcp.tools.function_tool import FunctionTool -from fastmcp.tools.tool import AuthCheckCallable, Tool +from fastmcp.tools.tool import Tool from fastmcp.utilities.types import NotSet, NotSetT try: - from prefab_ui import UIResponse as _PrefabUIResponse from prefab_ui.components.base import Component as _PrefabComponent + from prefab_ui.response import UIResponse as _PrefabUIResponse _HAS_PREFAB = True except ImportError: @@ -207,7 +208,7 @@ def tool( task: bool | TaskConfig | None = None, serializer: ToolResultSerializerType | None = None, # Deprecated timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionTool: ... @overload @@ -229,7 +230,7 @@ def tool( task: bool | TaskConfig | None = None, serializer: ToolResultSerializerType | None = None, # Deprecated timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], FunctionTool]: ... 
# NOTE: This method mirrors fastmcp.tools.tool() but adds registration, @@ -254,7 +255,7 @@ def tool( task: bool | TaskConfig | None = None, serializer: ToolResultSerializerType | None = None, # Deprecated timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> ( Callable[[AnyFunction], FunctionTool] | FunctionTool diff --git a/src/fastmcp/server/providers/openapi/components.py b/src/fastmcp/server/providers/openapi/components.py index 4e6d18f1ef..6b942d22e2 100644 --- a/src/fastmcp/server/providers/openapi/components.py +++ b/src/fastmcp/server/providers/openapi/components.py @@ -159,23 +159,28 @@ def __repr__(self) -> str: async def run(self, arguments: dict[str, Any]) -> ToolResult: """Execute the HTTP request using RequestDirector.""" + # Build the request β€” errors here are programming/schema issues, + # not HTTP failures, so we catch them separately. try: base_url = str(self._client.base_url) or "http://localhost" - - # Build the request using RequestDirector request = self._director.build(self._route, arguments, base_url) - # Add client headers (lowest precedence) if self._client.headers: for key, value in self._client.headers.items(): if key not in request.headers: request.headers[key] = value - # Add MCP transport headers (highest precedence) mcp_headers = get_http_headers() if mcp_headers: request.headers.update(mcp_headers) + except Exception as e: + raise ValueError( + f"Error building request for {self._route.method.upper()} " + f"{self._route.path}: {type(e).__name__}: {e}" + ) from e + # Send the request and process the response. + try: logger.debug(f"run - sending request; headers: {request.headers}") response = await self._client.send(request) @@ -196,6 +201,12 @@ async def run(self, arguments: dict[str, Any]) -> ToolResult: else: structured_output = result + # Structured content must be a dict for the MCP protocol. 
+ # Wrap non-dict values that slipped through (e.g. a backend + # returning an array when the schema declared an object). + if not isinstance(structured_output, dict): + structured_output = {"result": structured_output} + return ToolResult(structured_content=structured_output) except json.JSONDecodeError: return ToolResult(content=response.text) diff --git a/src/fastmcp/server/providers/openapi/provider.py b/src/fastmcp/server/providers/openapi/provider.py index ac79af400d..975e0228b2 100644 --- a/src/fastmcp/server/providers/openapi/provider.py +++ b/src/fastmcp/server/providers/openapi/provider.py @@ -34,7 +34,6 @@ from fastmcp.utilities.openapi import ( HTTPRoute, extract_output_schema_from_responses, - format_simple_description, parse_openapi_to_http_routes, ) from fastmcp.utilities.openapi.director import RequestDirector @@ -79,6 +78,7 @@ def __init__( mcp_component_fn: ComponentFn | None = None, mcp_names: dict[str, str] | None = None, tags: set[str] | None = None, + validate_output: bool = True, ): """Initialize provider by parsing OpenAPI spec and creating components. @@ -93,6 +93,10 @@ def __init__( mcp_component_fn: Optional callable for component customization mcp_names: Optional dictionary mapping operationId to component names tags: Optional set of tags to add to all components + validate_output: If True (default), tools use the output schema + extracted from the OpenAPI spec for response validation. If + False, a permissive schema is used instead, allowing any + response structure while still returning structured JSON. 
""" super().__init__() @@ -101,6 +105,7 @@ def __init__( client = self._create_default_client(openapi_spec) self._client = client self._mcp_component_fn = mcp_component_fn + self._validate_output = validate_output # Keep track of names to detect collisions self._used_names: dict[str, Counter[str]] = { @@ -232,24 +237,30 @@ def _create_openapi_tool( route.openapi_version, ) + if not self._validate_output and output_schema is not None: + # Use a permissive schema that accepts any object, preserving + # the wrap-result flag so non-object responses still get wrapped + permissive: dict[str, Any] = { + "type": "object", + "additionalProperties": True, + } + if output_schema.get("x-fastmcp-wrap-result"): + permissive["x-fastmcp-wrap-result"] = True + output_schema = permissive + tool_name = self._get_unique_name(name, "tool") base_description = ( route.description or route.summary or f"Executes {route.method} {route.path}" ) - enhanced_description = format_simple_description( - base_description=base_description, - parameters=route.parameters, - request_body=route.request_body, - ) tool = OpenAPITool( client=self._client, route=route, director=self._director, name=tool_name, - description=enhanced_description, + description=base_description, parameters=combined_schema, output_schema=output_schema, tags=set(route.tags or []) | tags, @@ -276,11 +287,6 @@ def _create_openapi_resource( base_description = ( route.description or route.summary or f"Represents {route.path}" ) - enhanced_description = format_simple_description( - base_description=base_description, - parameters=route.parameters, - request_body=route.request_body, - ) resource = OpenAPIResource( client=self._client, @@ -288,7 +294,7 @@ def _create_openapi_resource( director=self._director, uri=resource_uri, name=resource_name, - description=enhanced_description, + description=base_description, mime_type=_extract_mime_type_from_route(route), tags=set(route.tags or []) | tags, ) @@ -321,11 +327,6 @@ def 
_create_openapi_template( base_description = ( route.description or route.summary or f"Template for {route.path}" ) - enhanced_description = format_simple_description( - base_description=base_description, - parameters=route.parameters, - request_body=route.request_body, - ) template_params_schema = { "type": "object", @@ -355,7 +356,7 @@ def _create_openapi_template( director=self._director, uri_template=uri_template_str, name=template_name, - description=enhanced_description, + description=base_description, parameters=template_params_schema, tags=set(route.tags or []) | tags, mime_type=_extract_mime_type_from_route(route), diff --git a/src/fastmcp/server/providers/proxy.py b/src/fastmcp/server/providers/proxy.py index d063373961..0b7ce0096e 100644 --- a/src/fastmcp/server/providers/proxy.py +++ b/src/fastmcp/server/providers/proxy.py @@ -16,6 +16,7 @@ import mcp.types from mcp import ServerSession from mcp.client.session import ClientSession +from mcp.server.lowlevel.server import request_ctx from mcp.shared.context import LifespanContextT, RequestContext from mcp.shared.exceptions import McpError from mcp.types import ( @@ -121,6 +122,12 @@ async def run( client = await self._get_client() async with client: ctx = context or get_context() + # StatefulProxyClient reuses sessions across requests, so + # its receive-loop task has stale ContextVars from the first + # request. Stash the current RequestContext in the shared + # ref so handlers can restore it before forwarding. + if isinstance(client, StatefulProxyClient): + cast(list[Any], client._proxy_rc_ref)[0] = ctx.request_context # Build meta dict from request context meta: dict[str, Any] | None = None if hasattr(ctx, "request_context"): @@ -781,16 +788,50 @@ async def default_proxy_progress_handler( await ctx.report_progress(progress, total, message) +def _restore_request_context( + rc_ref: list[Any], +) -> None: + """Set the ``request_ctx`` ContextVar from a stashed RequestContext. 
+ + Called at the start of proxy handler invocations in + ``StatefulProxyClient`` to fix stale ContextVars in the receive-loop + task. Only overrides when the ContextVar is genuinely stale (same + session, different request_id) to avoid corrupting the concurrent + case where multiple sessions share the same ref via ``copy.copy``. + """ + rc = rc_ref[0] + if rc is None: + return + try: + current_rc = request_ctx.get() + except LookupError: + request_ctx.set(rc) + return + if current_rc.session is rc.session and current_rc.request_id != rc.request_id: + request_ctx.set(rc) + + +def _make_restoring_handler(handler: Callable, rc_ref: list[Any]) -> Callable: + """Wrap a proxy handler to restore request_ctx before delegating. + + The wrapper is a plain ``async def`` so it passes + ``inspect.isfunction()`` checks in handler registration paths + (e.g., ``create_roots_callback``). + """ + + async def wrapper(*args: Any, **kwargs: Any) -> Any: + _restore_request_context(rc_ref) + return await handler(*args, **kwargs) + + return wrapper + + class ProxyClient(Client[ClientTransportT]): """A proxy client that forwards advanced interactions between a remote MCP server and the proxy's connected clients. Supports forwarding roots, sampling, elicitation, logging, and progress. """ - # Stored context for handlers when contextvar isn't available - # (e.g., when receive loop was started before any request context) - _proxy_context: Context | None = None - def __init__( self, transport: ClientTransportT @@ -826,9 +867,39 @@ class StatefulProxyClient(ProxyClient[ClientTransportT]): This is useful to proxy a stateful mcp server such as the Playwright MCP server. Note that it is essential to ensure that the proxy server itself is also stateful. + + Because session reuse means the receive-loop task inherits a stale + ``request_ctx`` ContextVar snapshot, the default proxy handlers are + replaced with versions that restore the ContextVar before forwarding. 
+ ``ProxyTool.run`` stashes the current ``RequestContext`` in + ``_proxy_rc_ref`` before each backend call, and the handlers consult + it to detect (and correct) staleness. """ + # Mutable list shared across copies (Client.new() uses copy.copy, + # which preserves references to mutable containers). ProxyTool.run + # writes [0] before each backend call; handlers read it to detect + # stale ContextVars and restore the correct request_ctx. + # + # We store the concrete RequestContext (not fastmcp's Context) because + # Context properties are themselves ContextVar-dependent and resolve + # in the caller's async context β€” which is stale in the receive loop. + _proxy_rc_ref: list[Any] + def __init__(self, *args: Any, **kwargs: Any): + # Install context-restoring handler wrappers BEFORE super().__init__ + # registers them with the Client's session kwargs. + self._proxy_rc_ref = [None] + for key, default_fn in ( + ("roots", default_proxy_roots_handler), + ("sampling_handler", default_proxy_sampling_handler), + ("elicitation_handler", default_proxy_elicitation_handler), + ("log_handler", default_proxy_log_handler), + ("progress_handler", default_proxy_progress_handler), + ): + if key not in kwargs: + kwargs[key] = _make_restoring_handler(default_fn, self._proxy_rc_ref) + super().__init__(*args, **kwargs) self._caches: dict[ServerSession, Client[ClientTransportT]] = {} diff --git a/src/fastmcp/server/sampling/run.py b/src/fastmcp/server/sampling/run.py index 7299689162..6ece2c30e4 100644 --- a/src/fastmcp/server/sampling/run.py +++ b/src/fastmcp/server/sampling/run.py @@ -8,6 +8,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Generic, Literal, cast +import anyio from mcp.types import ( ClientCapabilities, CreateMessageResult, @@ -31,6 +32,9 @@ from fastmcp import settings from fastmcp.exceptions import ToolError from fastmcp.server.sampling.sampling_tool import SamplingTool +from fastmcp.tools.function_tool import FunctionTool +from 
fastmcp.tools.tool_transform import TransformedTool +from fastmcp.utilities.async_utils import gather from fastmcp.utilities.json_schema import compress_schema from fastmcp.utilities.logging import get_logger from fastmcp.utilities.types import get_cached_typeadapter @@ -239,6 +243,7 @@ async def execute_tools( tool_calls: list[ToolUseContent], tool_map: dict[str, SamplingTool], mask_error_details: bool = False, + tool_concurrency: int | None = None, ) -> list[ToolResultContent]: """Execute tool calls and return results. @@ -249,66 +254,96 @@ async def execute_tools( When masked, only generic error messages are returned to the LLM. Tools can explicitly raise ToolError to bypass masking when they want to provide specific error messages to the LLM. + tool_concurrency: Controls parallel execution of tools: + - None (default): Sequential execution (one at a time) + - 0: Unlimited parallel execution + - N > 0: Execute at most N tools concurrently + If any tool has sequential=True, all tools execute sequentially + regardless of this setting. Returns: - List of tool result content blocks. + List of tool result content blocks in the same order as tool_calls. 
""" - tool_results: list[ToolResultContent] = [] + if tool_concurrency is not None and tool_concurrency < 0: + raise ValueError( + f"tool_concurrency must be None, 0 (unlimited), or a positive integer, " + f"got {tool_concurrency}" + ) - for tool_use in tool_calls: + async def _execute_single_tool(tool_use: ToolUseContent) -> ToolResultContent: + """Execute a single tool and return its result.""" tool = tool_map.get(tool_use.name) if tool is None: - tool_results.append( - ToolResultContent( - type="tool_result", - toolUseId=tool_use.id, - content=[ - TextContent( - type="text", - text=f"Error: Unknown tool '{tool_use.name}'", - ) - ], - isError=True, - ) - ) - else: - try: - result_value = await tool.run(tool_use.input) - tool_results.append( - ToolResultContent( - type="tool_result", - toolUseId=tool_use.id, - content=[TextContent(type="text", text=str(result_value))], - ) - ) - except ToolError as e: - # ToolError is the escape hatch - always pass message through - logger.exception(f"Error calling sampling tool '{tool_use.name}'") - tool_results.append( - ToolResultContent( - type="tool_result", - toolUseId=tool_use.id, - content=[TextContent(type="text", text=str(e))], - isError=True, - ) - ) - except Exception as e: - # Generic exceptions - mask based on setting - logger.exception(f"Error calling sampling tool '{tool_use.name}'") - if mask_error_details: - error_text = f"Error executing tool '{tool_use.name}'" - else: - error_text = f"Error executing tool '{tool_use.name}': {e}" - tool_results.append( - ToolResultContent( - type="tool_result", - toolUseId=tool_use.id, - content=[TextContent(type="text", text=error_text)], - isError=True, + return ToolResultContent( + type="tool_result", + toolUseId=tool_use.id, + content=[ + TextContent( + type="text", + text=f"Error: Unknown tool '{tool_use.name}'", ) - ) + ], + isError=True, + ) + + try: + result_value = await tool.run(tool_use.input) + return ToolResultContent( + type="tool_result", + toolUseId=tool_use.id, 
+ content=[TextContent(type="text", text=str(result_value))], + ) + except ToolError as e: + # ToolError is the escape hatch - always pass message through + logger.exception(f"Error calling sampling tool '{tool_use.name}'") + return ToolResultContent( + type="tool_result", + toolUseId=tool_use.id, + content=[TextContent(type="text", text=str(e))], + isError=True, + ) + except Exception as e: + # Generic exceptions - mask based on setting + logger.exception(f"Error calling sampling tool '{tool_use.name}'") + if mask_error_details: + error_text = f"Error executing tool '{tool_use.name}'" + else: + error_text = f"Error executing tool '{tool_use.name}': {e}" + return ToolResultContent( + type="tool_result", + toolUseId=tool_use.id, + content=[TextContent(type="text", text=error_text)], + isError=True, + ) + + # Check if any tool requires sequential execution + requires_sequential = any( + tool.sequential + for tool_use in tool_calls + if (tool := tool_map.get(tool_use.name)) is not None + ) + + # Execute sequentially if required or if concurrency is None (default) + if tool_concurrency is None or requires_sequential: + tool_results: list[ToolResultContent] = [] + for tool_use in tool_calls: + result = await _execute_single_tool(tool_use) + tool_results.append(result) + return tool_results + + # Execute in parallel + if tool_concurrency == 0: + # Unlimited parallel execution + return await gather(*[_execute_single_tool(tc) for tc in tool_calls]) + else: + # Bounded parallel execution with semaphore + semaphore = anyio.Semaphore(tool_concurrency) + + async def bounded_execute(tool_use: ToolUseContent) -> ToolResultContent: + async with semaphore: + return await _execute_single_tool(tool_use) - return tool_results + return await gather(*[bounded_execute(tc) for tc in tool_calls]) # --- Helper functions for sampling --- @@ -334,9 +369,22 @@ def prepare_messages( def prepare_tools( - tools: Sequence[SamplingTool | Callable[..., Any]] | None, + tools: Sequence[SamplingTool | 
FunctionTool | TransformedTool | Callable[..., Any]] + | None, ) -> list[SamplingTool] | None: - """Convert tools to SamplingTool objects.""" + """Convert tools to SamplingTool objects. + + Accepts SamplingTool instances, FunctionTool instances, TransformedTool instances, + or plain callable functions. FunctionTool and TransformedTool are converted using + from_callable_tool(), while plain functions use from_function(). + + Args: + tools: Sequence of tools to prepare. Can be SamplingTool, FunctionTool, + TransformedTool, or plain callable functions. + + Returns: + List of SamplingTool instances, or None if tools is None. + """ if tools is None: return None @@ -344,10 +392,14 @@ def prepare_tools( for t in tools: if isinstance(t, SamplingTool): sampling_tools.append(t) + elif isinstance(t, (FunctionTool, TransformedTool)): + sampling_tools.append(SamplingTool.from_callable_tool(t)) elif callable(t): sampling_tools.append(SamplingTool.from_function(t)) else: - raise TypeError(f"Expected SamplingTool or callable, got {type(t)}") + raise TypeError( + f"Expected SamplingTool, FunctionTool, TransformedTool, or callable, got {type(t)}" + ) return sampling_tools if sampling_tools else None @@ -408,10 +460,12 @@ async def sample_step_impl( temperature: float | None = None, max_tokens: int | None = None, model_preferences: ModelPreferences | str | list[str] | None = None, - tools: Sequence[SamplingTool | Callable[..., Any]] | None = None, + tools: Sequence[SamplingTool | FunctionTool | TransformedTool | Callable[..., Any]] + | None = None, tool_choice: ToolChoiceOption | str | None = None, auto_execute_tools: bool = True, mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SampleStep: """Implementation of Context.sample_step(). 
@@ -498,7 +552,10 @@ async def sample_step_impl( else settings.mask_error_details ) tool_results: list[ToolResultContent] = await execute_tools( - step_tool_calls, tool_map, mask_error_details=effective_mask + step_tool_calls, + tool_map, + mask_error_details=effective_mask, + tool_concurrency=tool_concurrency, ) if tool_results: @@ -520,9 +577,11 @@ async def sample_impl( temperature: float | None = None, max_tokens: int | None = None, model_preferences: ModelPreferences | str | list[str] | None = None, - tools: Sequence[SamplingTool | Callable[..., Any]] | None = None, + tools: Sequence[SamplingTool | FunctionTool | TransformedTool | Callable[..., Any]] + | None = None, result_type: type[ResultT] | None = None, mask_error_details: bool | None = None, + tool_concurrency: int | None = None, ) -> SamplingResult[ResultT]: """Implementation of Context.sample(). @@ -561,6 +620,7 @@ async def sample_impl( tools=sampling_tools, tool_choice=tool_choice, mask_error_details=mask_error_details, + tool_concurrency=tool_concurrency, ) # Check for final_response tool call for structured output diff --git a/src/fastmcp/server/sampling/sampling_tool.py b/src/fastmcp/server/sampling/sampling_tool.py index 106c55fc60..1781eb6517 100644 --- a/src/fastmcp/server/sampling/sampling_tool.py +++ b/src/fastmcp/server/sampling/sampling_tool.py @@ -6,10 +6,14 @@ from collections.abc import Callable from typing import Any +from mcp.types import TextContent from mcp.types import Tool as SDKTool from pydantic import ConfigDict from fastmcp.tools.function_parsing import ParsedFunction +from fastmcp.tools.function_tool import FunctionTool +from fastmcp.tools.tool import ToolResult +from fastmcp.tools.tool_transform import TransformedTool from fastmcp.utilities.types import FastMCPBaseModel @@ -40,6 +44,7 @@ def search(query: str) -> str: description: str | None = None parameters: dict[str, Any] fn: Callable[..., Any] + sequential: bool = False model_config = 
ConfigDict(arbitrary_types_allowed=True) @@ -79,6 +84,7 @@ def from_function( *, name: str | None = None, description: str | None = None, + sequential: bool = False, ) -> SamplingTool: """Create a SamplingTool from a function. @@ -89,6 +95,10 @@ def from_function( fn: The function to create a tool from. name: Optional name override. Defaults to the function's name. description: Optional description override. Defaults to the function's docstring. + sequential: If True, this tool requires sequential execution and prevents + parallel execution of all tools in the batch. Set to True for tools + with shared state, file writes, or other operations that cannot run + concurrently. Defaults to False. Returns: A SamplingTool wrapping the function. @@ -106,4 +116,68 @@ def from_function( description=description or parsed.description, parameters=parsed.input_schema, fn=parsed.fn, + sequential=sequential, + ) + + @classmethod + def from_callable_tool( + cls, + tool: FunctionTool | TransformedTool, + *, + name: str | None = None, + description: str | None = None, + ) -> SamplingTool: + """Create a SamplingTool from a FunctionTool or TransformedTool. + + Reuses existing server tools in sampling contexts. For TransformedTool, + the tool's .run() method is used to ensure proper argument transformation, + and the ToolResult is automatically unwrapped. + + Args: + tool: A FunctionTool or TransformedTool to convert. + name: Optional name override. Defaults to tool.name. + description: Optional description override. Defaults to tool.description. + + Raises: + TypeError: If the tool is not a FunctionTool or TransformedTool. + """ + # Validate that the tool is a supported type + if not isinstance(tool, (FunctionTool, TransformedTool)): + raise TypeError( + f"Expected FunctionTool or TransformedTool, got {type(tool).__name__}. " + "Only callable tools can be converted to SamplingTools." 
+ ) + + # Both FunctionTool and TransformedTool need .run() to ensure proper + # result processing (serializers, output_schema, wrap-result flags) + async def wrapper(**kwargs: Any) -> Any: + result = await tool.run(kwargs) + # Unwrap ToolResult - extract the actual value + if isinstance(result, ToolResult): + # If there's structured_content, use that + if result.structured_content is not None: + # Check tool's schema - this is the source of truth + if tool.output_schema and tool.output_schema.get( + "x-fastmcp-wrap-result" + ): + # Tool wraps results: {"result": value} -> value + return result.structured_content.get("result") + else: + # No wrapping: use structured_content directly + return result.structured_content + # Otherwise, extract from text content + if result.content and len(result.content) > 0: + first_content = result.content[0] + if isinstance(first_content, TextContent): + return first_content.text + return result + + fn = wrapper + + # Extract the callable function, name, description, and parameters + return cls( + name=name or tool.name, + description=description or tool.description, + parameters=tool.parameters, + fn=fn, ) diff --git a/src/fastmcp/server/server.py b/src/fastmcp/server/server.py index 7dc7478216..8ad329b2dd 100644 --- a/src/fastmcp/server/server.py +++ b/src/fastmcp/server/server.py @@ -10,8 +10,6 @@ AsyncIterator, Awaitable, Callable, - Collection, - Mapping, Sequence, ) from contextlib import ( @@ -63,7 +61,7 @@ app_config_to_meta_dict, resolve_ui_mime_type, ) -from fastmcp.server.auth import AuthContext, AuthProvider, run_auth_checks +from fastmcp.server.auth import AuthCheck, AuthContext, AuthProvider, run_auth_checks from fastmcp.server.dependencies import get_access_token from fastmcp.server.lifespan import Lifespan from fastmcp.server.low_level import LowLevelServer @@ -79,9 +77,8 @@ ) from fastmcp.server.transforms.visibility import apply_session_transforms, is_enabled from fastmcp.settings import DuplicateBehavior as 
DuplicateBehaviorSetting -from fastmcp.settings import Settings from fastmcp.tools.function_tool import FunctionTool -from fastmcp.tools.tool import AuthCheckCallable, Tool, ToolResult +from fastmcp.tools.tool import Tool, ToolResult from fastmcp.tools.tool_transform import ToolTransformConfig from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.logging import get_logger @@ -99,7 +96,6 @@ from fastmcp.server.providers.openapi import RouteMap from fastmcp.server.providers.openapi import RouteMapFn as OpenAPIRouteMapFn from fastmcp.server.providers.proxy import FastMCPProxy - from fastmcp.tools.tool import ToolResultSerializerType logger = get_logger(__name__) @@ -107,39 +103,37 @@ DuplicateBehavior = Literal["warn", "error", "replace", "ignore"] -def _resolve_on_duplicate( - on_duplicate: DuplicateBehavior | None, - on_duplicate_tools: DuplicateBehavior | None, - on_duplicate_resources: DuplicateBehavior | None, - on_duplicate_prompts: DuplicateBehavior | None, -) -> DuplicateBehavior: - """Resolve on_duplicate from deprecated per-type params. - - Takes the most strict value if multiple are provided. - Delete this function when removing deprecated params. 
- """ - strictness_order: list[DuplicateBehavior] = ["error", "warn", "replace", "ignore"] - deprecated_values: list[DuplicateBehavior] = [] - - deprecated_params: list[tuple[str, DuplicateBehavior | None]] = [ - ("on_duplicate_tools", on_duplicate_tools), - ("on_duplicate_resources", on_duplicate_resources), - ("on_duplicate_prompts", on_duplicate_prompts), - ] - for name, value in deprecated_params: - if value is not None: - if fastmcp.settings.deprecation_warnings: - warnings.warn( - f"{name} is deprecated, use on_duplicate instead", - DeprecationWarning, - stacklevel=4, - ) - deprecated_values.append(value) - - if on_duplicate is None and deprecated_values: - return min(deprecated_values, key=lambda x: strictness_order.index(x)) - - return on_duplicate or "warn" +_REMOVED_KWARGS: dict[str, str] = { + "host": "Pass `host` to `run_http_async()`, or set FASTMCP_HOST.", + "port": "Pass `port` to `run_http_async()`, or set FASTMCP_PORT.", + "sse_path": "Pass `path` to `run_http_async()` or `http_app()`, or set FASTMCP_SSE_PATH.", + "message_path": "Set FASTMCP_MESSAGE_PATH.", + "streamable_http_path": "Pass `path` to `run_http_async()` or `http_app()`, or set FASTMCP_STREAMABLE_HTTP_PATH.", + "json_response": "Pass `json_response` to `run_http_async()` or `http_app()`, or set FASTMCP_JSON_RESPONSE.", + "stateless_http": "Pass `stateless_http` to `run_http_async()` or `http_app()`, or set FASTMCP_STATELESS_HTTP.", + "debug": "Set FASTMCP_DEBUG.", + "log_level": "Pass `log_level` to `run_http_async()`, or set FASTMCP_LOG_LEVEL.", + "on_duplicate_tools": "Use `on_duplicate=` instead.", + "on_duplicate_resources": "Use `on_duplicate=` instead.", + "on_duplicate_prompts": "Use `on_duplicate=` instead.", + "tool_serializer": "Return ToolResult from your tools instead. 
See https://gofastmcp.com/servers/tools#custom-serialization", + "include_tags": "Use `server.enable(tags=..., only=True)` after creating the server.", + "exclude_tags": "Use `server.disable(tags=...)` after creating the server.", + "tool_transformations": "Use `server.add_transform(ToolTransform(...))` after creating the server.", +} + + +def _check_removed_kwargs(kwargs: dict[str, Any]) -> None: + """Raise helpful TypeErrors for kwargs removed in v3.""" + for key in kwargs: + if key in _REMOVED_KWARGS: + raise TypeError( + f"FastMCP() no longer accepts `{key}`. {_REMOVED_KWARGS[key]}" + ) + if kwargs: + raise TypeError( + f"FastMCP() got unexpected keyword argument(s): {', '.join(repr(k) for k in kwargs)}" + ) Transport = Literal["stdio", "http", "sse", "streamable-http"] @@ -232,45 +226,24 @@ def __init__( middleware: Sequence[Middleware] | None = None, providers: Sequence[Provider] | None = None, lifespan: LifespanCallable | Lifespan | None = None, - mask_error_details: bool | None = None, tools: Sequence[Tool | Callable[..., Any]] | None = None, - tool_serializer: ToolResultSerializerType | None = None, - include_tags: Collection[str] | None = None, - exclude_tags: Collection[str] | None = None, on_duplicate: DuplicateBehavior | None = None, + mask_error_details: bool | None = None, + dereference_schemas: bool = True, strict_input_validation: bool | None = None, list_page_size: int | None = None, tasks: bool | None = None, session_state_store: AsyncKeyValue | None = None, - # --- - # --- DEPRECATED parameters --- - # --- - on_duplicate_tools: DuplicateBehavior | None = None, - on_duplicate_resources: DuplicateBehavior | None = None, - on_duplicate_prompts: DuplicateBehavior | None = None, - log_level: str | None = None, - debug: bool | None = None, - host: str | None = None, - port: int | None = None, - sse_path: str | None = None, - message_path: str | None = None, - streamable_http_path: str | None = None, - json_response: bool | None = None, - 
stateless_http: bool | None = None, sampling_handler: SamplingHandler | None = None, sampling_handler_behavior: Literal["always", "fallback"] | None = None, - tool_transformations: Mapping[str, ToolTransformConfig] | None = None, + **kwargs: Any, ): + _check_removed_kwargs(kwargs) + # Initialize Provider (sets up _transforms) super().__init__() - # Resolve on_duplicate from deprecated params (delete when removing deprecation) - self._on_duplicate: DuplicateBehaviorSetting = _resolve_on_duplicate( - on_duplicate, - on_duplicate_tools, - on_duplicate_resources, - on_duplicate_prompts, - ) + self._on_duplicate: DuplicateBehaviorSetting = on_duplicate or "warn" # Resolve server default for background task support self._support_tasks_by_default: bool = tasks if tasks is not None else False @@ -312,16 +285,6 @@ def __init__( raise ValueError("list_page_size must be a positive integer") self._list_page_size: int | None = list_page_size - if tool_serializer is not None and fastmcp.settings.deprecation_warnings: - warnings.warn( - "The `tool_serializer` parameter is deprecated. " - "Return ToolResult from your tools for full control over serialization. " - "See https://gofastmcp.com/servers/tools#custom-serialization for migration examples.", - DeprecationWarning, - stacklevel=2, - ) - self._tool_serializer: Callable[[Any], str] | None = tool_serializer - # Handle Lifespan instances (they're callable) or regular lifespan functions if lifespan is not None: self._lifespan: LifespanCallable[LifespanResultT] = lifespan @@ -349,38 +312,9 @@ def __init__( if tools: for tool in tools: if not isinstance(tool, Tool): - tool = Tool.from_function(tool, serializer=self._tool_serializer) + tool = Tool.from_function(tool) self.add_tool(tool) - # Handle deprecated include_tags and exclude_tags parameters - if include_tags is not None: - warnings.warn( - "include_tags is deprecated. 
Use server.enable(tags=..., only=True) instead.", - DeprecationWarning, - stacklevel=2, - ) - # For backwards compatibility, initialize allowlist from include_tags - self.enable(tags=set(include_tags), only=True) - if exclude_tags is not None: - warnings.warn( - "exclude_tags is deprecated. Use server.disable(tags=...) instead.", - DeprecationWarning, - stacklevel=2, - ) - # For backwards compatibility, initialize blocklist from exclude_tags - self.disable(tags=set(exclude_tags)) - - # Handle deprecated tool_transformations parameter - if tool_transformations: - if fastmcp.settings.deprecation_warnings: - warnings.warn( - "The tool_transformations parameter is deprecated. Use " - "server.add_transform(ToolTransform({...})) instead.", - DeprecationWarning, - stacklevel=2, - ) - self._transforms.append(ToolTransform(dict(tool_transformations))) - self.strict_input_validation: bool = ( strict_input_validation if strict_input_validation is not None @@ -389,6 +323,13 @@ def __init__( self.middleware: list[Middleware] = list(middleware or []) + if dereference_schemas: + from fastmcp.server.middleware.dereference import ( + DereferenceRefsMiddleware, + ) + + self.middleware.append(DereferenceRefsMiddleware()) + # Set up MCP protocol handlers self._setup_handlers() @@ -397,71 +338,9 @@ def __init__( sampling_handler_behavior or "fallback" ) - self._handle_deprecated_settings( - log_level=log_level, - debug=debug, - host=host, - port=port, - sse_path=sse_path, - message_path=message_path, - streamable_http_path=streamable_http_path, - json_response=json_response, - stateless_http=stateless_http, - ) - def __repr__(self) -> str: return f"{type(self).__name__}({self.name!r})" - def _handle_deprecated_settings( - self, - log_level: str | None, - debug: bool | None, - host: str | None, - port: int | None, - sse_path: str | None, - message_path: str | None, - streamable_http_path: str | None, - json_response: bool | None, - stateless_http: bool | None, - ) -> None: - """Handle 
deprecated settings. Deprecated in 2.8.0.""" - deprecated_settings: dict[str, Any] = {} - - for name, arg in [ - ("log_level", log_level), - ("debug", debug), - ("host", host), - ("port", port), - ("sse_path", sse_path), - ("message_path", message_path), - ("streamable_http_path", streamable_http_path), - ("json_response", json_response), - ("stateless_http", stateless_http), - ]: - if arg is not None: - # Deprecated in 2.8.0 - if fastmcp.settings.deprecation_warnings: - warnings.warn( - f"Providing `{name}` when creating a server is deprecated. Provide it when calling `run` or as a global setting instead.", - DeprecationWarning, - stacklevel=2, - ) - deprecated_settings[name] = arg - - combined_settings = fastmcp.settings.model_dump() | deprecated_settings - self._deprecated_settings = Settings(**combined_settings) - - @property - def settings(self) -> Settings: - # Deprecated in 2.8.0 - if fastmcp.settings.deprecation_warnings: - warnings.warn( - "Accessing `.settings` on a FastMCP instance is deprecated. Use the global `fastmcp.settings` instead.", - DeprecationWarning, - stacklevel=2, - ) - return self._deprecated_settings - @property def name(self) -> str: return self._mcp_server.name @@ -489,6 +368,18 @@ def icons(self) -> list[mcp.types.Icon]: else: return list(self._mcp_server.icons) + @property + def local_provider(self) -> LocalProvider: + """The server's local provider, which stores directly-registered components. 
+ + Use this to remove components: + + mcp.local_provider.remove_tool("my_tool") + mcp.local_provider.remove_resource("data://info") + mcp.local_provider.remove_prompt("my_prompt") + """ + return self._local_provider + async def _run_middleware( self, context: MiddlewareContext[Any], @@ -638,7 +529,7 @@ async def list_tools(self, *, run_middleware: bool = True) -> Sequence[Tool]: if not skip_auth and tool.auth is not None: ctx = AuthContext(token=token, component=tool) try: - if not run_auth_checks(tool.auth, ctx): + if not await run_auth_checks(tool.auth, ctx): continue except AuthorizationError: continue @@ -669,7 +560,7 @@ async def _get_tool( if not skip_auth and tool.auth is not None: ctx = AuthContext(token=token, component=tool) try: - if not run_auth_checks(tool.auth, ctx): + if not await run_auth_checks(tool.auth, ctx): return None except AuthorizationError: return None @@ -736,7 +627,7 @@ async def list_resources( if not skip_auth and resource.auth is not None: ctx = AuthContext(token=token, component=resource) try: - if not run_auth_checks(resource.auth, ctx): + if not await run_auth_checks(resource.auth, ctx): continue except AuthorizationError: continue @@ -767,7 +658,7 @@ async def _get_resource( if not skip_auth and resource.auth is not None: ctx = AuthContext(token=token, component=resource) try: - if not run_auth_checks(resource.auth, ctx): + if not await run_auth_checks(resource.auth, ctx): return None except AuthorizationError: return None @@ -835,7 +726,7 @@ async def list_resource_templates( if not skip_auth and template.auth is not None: ctx = AuthContext(token=token, component=template) try: - if not run_auth_checks(template.auth, ctx): + if not await run_auth_checks(template.auth, ctx): continue except AuthorizationError: continue @@ -866,7 +757,7 @@ async def _get_resource_template( if not skip_auth and template.auth is not None: ctx = AuthContext(token=token, component=template) try: - if not run_auth_checks(template.auth, ctx): + if not 
await run_auth_checks(template.auth, ctx): return None except AuthorizationError: return None @@ -930,7 +821,7 @@ async def list_prompts(self, *, run_middleware: bool = True) -> Sequence[Prompt] if not skip_auth and prompt.auth is not None: ctx = AuthContext(token=token, component=prompt) try: - if not run_auth_checks(prompt.auth, ctx): + if not await run_auth_checks(prompt.auth, ctx): continue except AuthorizationError: continue @@ -961,7 +852,7 @@ async def _get_prompt( if not skip_auth and prompt.auth is not None: ctx = AuthContext(token=token, component=prompt) try: - if not run_auth_checks(prompt.auth, ctx): + if not await run_auth_checks(prompt.auth, ctx): return None except AuthorizationError: return None @@ -1378,6 +1269,9 @@ def add_tool(self, tool: Tool | Callable[..., Any]) -> Tool: def remove_tool(self, name: str, version: str | None = None) -> None: """Remove tool(s) from the server. + .. deprecated:: + Use ``mcp.local_provider.remove_tool(name)`` instead. + Args: name: The name of the tool to remove. version: If None, removes ALL versions. If specified, removes only that version. @@ -1385,6 +1279,13 @@ def remove_tool(self, name: str, version: str | None = None) -> None: Raises: NotFoundError: If no matching tool is found. """ + if fastmcp.settings.deprecation_warnings: + warnings.warn( + "remove_tool() is deprecated. Use " + "mcp.local_provider.remove_tool(name) instead.", + DeprecationWarning, + stacklevel=2, + ) try: self._local_provider.remove_tool(name, version) except KeyError: @@ -1412,7 +1313,7 @@ def tool( app: AppConfig | dict[str, Any] | bool | None = None, task: bool | TaskConfig | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionTool: ... 
@overload @@ -1433,7 +1334,7 @@ def tool( app: AppConfig | dict[str, Any] | bool | None = None, task: bool | TaskConfig | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], FunctionTool]: ... def tool( @@ -1453,7 +1354,7 @@ def tool( app: AppConfig | dict[str, Any] | bool | None = None, task: bool | TaskConfig | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> ( Callable[[AnyFunction], FunctionTool] | FunctionTool @@ -1530,7 +1431,6 @@ def my_tool(x: int) -> str: meta=meta, task=task if task is not None else self._support_tasks_by_default, timeout=timeout, - serializer=self._tool_serializer, auth=auth, ) @@ -1575,7 +1475,7 @@ def resource( meta: dict[str, Any] | None = None, app: AppConfig | dict[str, Any] | bool | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], Resource | ResourceTemplate | AnyFunction]: """Decorator to register a function as a resource. @@ -1706,7 +1606,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionPrompt: ... @overload @@ -1722,7 +1622,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[AnyFunction], FunctionPrompt]: ... 
def prompt( @@ -1737,7 +1637,7 @@ def prompt( tags: set[str] | None = None, meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> ( Callable[[AnyFunction], FunctionPrompt] | FunctionPrompt @@ -2029,6 +1929,7 @@ def from_openapi( mcp_component_fn: OpenAPIComponentFn | None = None, mcp_names: dict[str, str] | None = None, tags: set[str] | None = None, + validate_output: bool = True, **settings: Any, ) -> Self: """ @@ -2045,6 +1946,10 @@ def from_openapi( mcp_component_fn: Optional callable for component customization mcp_names: Optional dictionary mapping operationId to component names tags: Optional set of tags to add to all components + validate_output: If True (default), tools use the output schema + extracted from the OpenAPI spec for response validation. If + False, a permissive schema is used instead, allowing any + response structure while still returning structured JSON. 
**settings: Additional settings passed to FastMCP Returns: @@ -2060,6 +1965,7 @@ def from_openapi( mcp_component_fn=mcp_component_fn, mcp_names=mcp_names, tags=tags, + validate_output=validate_output, ) return cls(name=name, providers=[provider], **settings) diff --git a/src/fastmcp/server/tasks/__init__.py b/src/fastmcp/server/tasks/__init__.py index 13ba9e80ce..008332db5b 100644 --- a/src/fastmcp/server/tasks/__init__.py +++ b/src/fastmcp/server/tasks/__init__.py @@ -5,12 +5,21 @@ from fastmcp.server.tasks.capabilities import get_task_capabilities from fastmcp.server.tasks.config import TaskConfig, TaskMeta, TaskMode -from fastmcp.server.tasks.elicitation import elicit_for_task, handle_task_input +from fastmcp.server.tasks.elicitation import ( + elicit_for_task, + handle_task_input, + relay_elicitation, +) from fastmcp.server.tasks.keys import ( build_task_key, get_client_task_id_from_key, parse_task_key, ) +from fastmcp.server.tasks.notifications import ( + ensure_subscriber_running, + push_notification, + stop_subscriber, +) __all__ = [ "TaskConfig", @@ -18,8 +27,12 @@ "TaskMode", "build_task_key", "elicit_for_task", + "ensure_subscriber_running", "get_client_task_id_from_key", "get_task_capabilities", "handle_task_input", "parse_task_key", + "push_notification", + "relay_elicitation", + "stop_subscriber", ] diff --git a/src/fastmcp/server/tasks/elicitation.py b/src/fastmcp/server/tasks/elicitation.py index 2fc0bef5fc..cb148cfc78 100644 --- a/src/fastmcp/server/tasks/elicitation.py +++ b/src/fastmcp/server/tasks/elicitation.py @@ -5,7 +5,7 @@ an active request context, so elicitation requires special handling: 1. Set task status to "input_required" via Redis -2. Send notifications/tasks/updated with elicitation metadata +2. Send notifications/tasks/status with elicitation metadata 3. Wait for client to send input via tasks/sendInput 4. 
Resume task execution with the provided input @@ -15,11 +15,11 @@ from __future__ import annotations -import asyncio import json import logging import uuid -from typing import TYPE_CHECKING, Any +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any, cast import mcp.types from mcp import ServerSession @@ -41,7 +41,7 @@ async def elicit_for_task( task_id: str, - session: ServerSession, + session: ServerSession | None, message: str, schema: dict[str, Any], fastmcp: FastMCP, @@ -75,12 +75,21 @@ async def elicit_for_task( # Generate a unique request ID for this elicitation request_id = str(uuid.uuid4()) - # Get session ID for Redis key construction - session_id = getattr(session, "_fastmcp_state_prefix", None) - if session_id is None: - # Generate a session ID if not already set - session_id = str(uuid.uuid4()) - session._fastmcp_state_prefix = session_id # type: ignore[attr-defined] + # Get session ID from task context (authoritative source for background tasks) + # This is extracted from the Docket execution key: {session_id}:{task_id}:... + from fastmcp.server.dependencies import get_task_context + + task_context = get_task_context() + if task_context is not None: + session_id = task_context.session_id + else: + # Fallback: try to get from session attribute (shouldn't happen in background) + session_id = getattr(session, "_fastmcp_state_prefix", None) + if session_id is None: + raise RuntimeError( + "Cannot determine session_id for elicitation. " + "This typically means elicit_for_task() was called outside a Docket worker context." 
+ ) # Store elicitation request in Redis request_key = ELICIT_REQUEST_KEY.format(session_id=session_id, task_id=task_id) @@ -107,14 +116,25 @@ async def elicit_for_task( ex=ELICIT_TTL_SECONDS, ) - # Send task status update notification with input_required status - # This follows SEP-1686 for background task status updates - notification = mcp.types.JSONRPCNotification( - jsonrpc="2.0", - method="notifications/tasks/updated", - params={}, - _meta={ # type: ignore[call-arg] - "modelcontextprotocol.io/related-task": { + # Send task status update notification with input_required status. + # Use notifications/tasks/status so typed MCP clients can consume it. + # + # NOTE: We use the distributed notification queue instead of session.send_notification() + # This enables notifications to work when workers run in separate processes + # (Azure Web PubSub / Service Bus inspired pattern) + timestamp = datetime.now(timezone.utc).isoformat() + notification_dict = { + "method": "notifications/tasks/status", + "params": { + "taskId": task_id, + "status": "input_required", + "statusMessage": message, + "createdAt": timestamp, + "lastUpdatedAt": timestamp, + "ttl": ELICIT_TTL_SECONDS * 1000, + }, + "_meta": { + "io.modelcontextprotocol/related-task": { "taskId": task_id, "status": "input_required", "statusMessage": message, @@ -125,54 +145,148 @@ async def elicit_for_task( }, } }, - ) + } + + # Push notification to Redis queue (works from any process) + # Server's subscriber loop will forward to client + from fastmcp.server.tasks.notifications import push_notification - # Send notification (best effort - task status is stored in Redis) - # Log failures for debugging but don't fail the elicitation try: - await session.send_notification(notification) # type: ignore[arg-type] + await push_notification(session_id, notification_dict, docket) except Exception as e: + # Fail fast: if notification can't be queued, client won't know to respond + # Return cancel immediately rather than 
waiting for 1-hour timeout logger.warning( - "Failed to send input_required notification for task %s: %s", + "Failed to queue input_required notification for task %s, cancelling elicitation: %s", task_id, e, ) + # Best-effort cleanup + try: + async with docket.redis() as redis: + await redis.delete( + docket.key(request_key), + docket.key(status_key), + ) + except Exception: + pass # Keys will expire via TTL + return mcp.types.ElicitResult(action="cancel", content=None) - # Wait for response (poll Redis) - # In a production implementation, this could use Redis pub/sub for lower latency + # Wait for response using BLPOP (blocking pop) + # This is much more efficient than polling - single Redis round-trip + # that blocks until a response is pushed, vs 7,200 round-trips/hour with polling max_wait_seconds = ELICIT_TTL_SECONDS - poll_interval = 0.5 # seconds - for _ in range(int(max_wait_seconds / poll_interval)): + try: async with docket.redis() as redis: - response_data = await redis.get(docket.key(response_key)) - if response_data: + # BLPOP blocks until an item is pushed to the list or timeout + # Returns tuple of (key, value) or None on timeout + result = await cast( + Any, + redis.blpop( + [docket.key(response_key)], + timeout=max_wait_seconds, + ), + ) + + if result: + # result is (key, value) tuple + _key, response_data = result response = json.loads(response_data) + # Clean up Redis keys await redis.delete( docket.key(request_key), - docket.key(response_key), docket.key(status_key), ) + # Convert to ElicitResult return mcp.types.ElicitResult( action=response.get("action", "accept"), content=response.get("content"), ) + except Exception as e: + logger.warning( + "BLPOP failed for task %s elicitation, falling back to cancel: %s", + task_id, + e, + ) - await asyncio.sleep(poll_interval) - - # Timeout - treat as cancellation - async with docket.redis() as redis: - await redis.delete( - docket.key(request_key), - docket.key(response_key), - docket.key(status_key), + 
# Timeout or error - treat as cancellation + # Best-effort cleanup - if Redis is unavailable, keys will expire via TTL + try: + async with docket.redis() as redis: + await redis.delete( + docket.key(request_key), + docket.key(response_key), + docket.key(status_key), + ) + except Exception as cleanup_error: + logger.debug( + "Failed to clean up elicitation keys for task %s (will expire via TTL): %s", + task_id, + cleanup_error, ) return mcp.types.ElicitResult(action="cancel", content=None) +async def relay_elicitation( + session: ServerSession, + session_id: str, + task_id: str, + elicitation: dict[str, Any], + fastmcp: FastMCP, +) -> None: + """Relay elicitation from a background task worker to the client. + + Called by the notification subscriber when it detects an input_required + notification with elicitation metadata. Sends a standard elicitation/create + request to the client session, then uses handle_task_input() to push the + response to Redis so the blocked worker can resume. + + Args: + session: MCP ServerSession + session_id: Session identifier + task_id: Background task ID + elicitation: Elicitation metadata (message, requestedSchema) + fastmcp: FastMCP server instance + """ + try: + result = await session.elicit( + message=elicitation["message"], + requestedSchema=elicitation["requestedSchema"], + ) + await handle_task_input( + task_id=task_id, + session_id=session_id, + action=result.action, + content=result.content, + fastmcp=fastmcp, + ) + logger.debug( + "Relayed elicitation response for task %s (action=%s)", + task_id, + result.action, + ) + except Exception as e: + logger.warning("Failed to relay elicitation for task %s: %s", task_id, e) + # Push a cancel response so the worker's BLPOP doesn't block forever + success = await handle_task_input( + task_id=task_id, + session_id=session_id, + action="cancel", + content=None, + fastmcp=fastmcp, + ) + if not success: + logger.warning( + "Failed to push cancel response for task %s " + "(worker may block 
until TTL)", + task_id, + ) + + async def handle_task_input( task_id: str, session_id: str, @@ -213,12 +327,15 @@ async def handle_task_input( if status is None or status.decode("utf-8") != "waiting": return False - # Store the response - await redis.set( + # Push response to list - this wakes up the BLPOP in elicit_for_task + # Using LPUSH instead of SET enables the efficient blocking wait pattern + await redis.lpush( # type: ignore[invalid-await] # redis-py union type (sync/async) docket.key(response_key), json.dumps(response), - ex=ELICIT_TTL_SECONDS, ) + # Set TTL on the response list (in case BLPOP doesn't consume it) + await redis.expire(docket.key(response_key), ELICIT_TTL_SECONDS) + # Update status to "responded" await redis.set( docket.key(status_key), diff --git a/src/fastmcp/server/tasks/handlers.py b/src/fastmcp/server/tasks/handlers.py index 02da22148d..be7bddd615 100644 --- a/src/fastmcp/server/tasks/handlers.py +++ b/src/fastmcp/server/tasks/handlers.py @@ -14,9 +14,10 @@ from mcp.shared.exceptions import McpError from mcp.types import INTERNAL_ERROR, ErrorData -from fastmcp.server.dependencies import _current_docket, get_context +from fastmcp.server.dependencies import _current_docket, get_access_token, get_context from fastmcp.server.tasks.config import TaskMeta from fastmcp.server.tasks.keys import build_task_key +from fastmcp.utilities.logging import get_logger if TYPE_CHECKING: from fastmcp.prompts.prompt import Prompt @@ -24,6 +25,8 @@ from fastmcp.resources.template import ResourceTemplate from fastmcp.tools.tool import Tool +logger = get_logger(__name__) + # Redis mapping TTL buffer: Add 15 minutes to Docket's execution_ttl TASK_MAPPING_TTL_BUFFER_SECONDS = 15 * 60 @@ -96,10 +99,21 @@ async def submit_to_docket( f"fastmcp:task:{session_id}:{server_task_id}:poll_interval" ) poll_interval_ms = int(component.task_config.poll_interval.total_seconds() * 1000) + + # Snapshot the current access token (if any) for background task access (#3095) + 
access_token = get_access_token() + access_token_key = docket.key( + f"fastmcp:task:{session_id}:{server_task_id}:access_token" + ) + async with docket.redis() as redis: await redis.set(task_meta_key, task_key, ex=ttl_seconds) await redis.set(created_at_key, created_at.isoformat(), ex=ttl_seconds) await redis.set(poll_interval_key, str(poll_interval_ms), ex=ttl_seconds) + if access_token is not None: + await redis.set( + access_token_key, access_token.model_dump_json(), ex=ttl_seconds + ) # Register session for Context access in background workers (SEP-1686) # This enables elicitation/sampling from background tasks via weakref @@ -109,21 +123,31 @@ async def submit_to_docket( register_task_session(session_id, ctx.session) - # Send notifications/tasks/created per SEP-1686 (mandatory) - # Send BEFORE queuing to avoid race where task completes before notification - notification = mcp.types.JSONRPCNotification( - jsonrpc="2.0", - method="notifications/tasks/created", - params={}, # Empty params per spec - _meta={ # type: ignore[call-arg] # _meta is Pydantic alias for meta field - "modelcontextprotocol.io/related-task": { + # Send an initial tasks/status notification before queueing. + # This guarantees clients can observe task creation immediately. 
+ notification = mcp.types.TaskStatusNotification.model_validate( + { + "method": "notifications/tasks/status", + "params": { "taskId": server_task_id, - } - }, + "status": "working", + "statusMessage": "Task submitted", + "createdAt": created_at, + "lastUpdatedAt": created_at, + "ttl": ttl_ms, + "pollInterval": poll_interval_ms, + }, + "_meta": { + "io.modelcontextprotocol/related-task": { + "taskId": server_task_id, + } + }, + } ) + server_notification = mcp.types.ServerNotification(notification) with suppress(Exception): # Don't let notification failures break task creation - await ctx.session.send_notification(notification) # type: ignore[arg-type] + await ctx.session.send_notification(server_notification) # Queue function to Docket by key (result storage via execution_ttl) # Use component.add_to_docket() which handles calling conventions @@ -151,6 +175,34 @@ async def submit_to_docket( poll_interval_ms, ) + # Start notification subscriber for distributed elicitation (idempotent) + # This enables ctx.elicit() to work when workers run in separate processes + # Subscriber forwards notifications from Redis queue to client session + from fastmcp.server.tasks.notifications import ( + ensure_subscriber_running, + stop_subscriber, + ) + + try: + await ensure_subscriber_running(session_id, ctx.session, docket, ctx.fastmcp) + + # Register cleanup callback on session exit (once per session) + # This ensures subscriber is stopped when the session disconnects + if ( + hasattr(ctx.session, "_exit_stack") + and ctx.session._exit_stack is not None + and not getattr(ctx.session, "_notification_cleanup_registered", False) + ): + + async def _cleanup_subscriber() -> None: + await stop_subscriber(session_id) + + ctx.session._exit_stack.push_async_callback(_cleanup_subscriber) + ctx.session._notification_cleanup_registered = True # type: ignore[attr-defined] + except Exception as e: + # Non-fatal: elicitation will still work via polling fallback + logger.debug("Failed to start 
notification subscriber: %s", e) + # Return CreateTaskResult with proper Task object # Tasks MUST begin in "working" status per SEP-1686 final spec (line 381) return mcp.types.CreateTaskResult( diff --git a/src/fastmcp/server/tasks/notifications.py b/src/fastmcp/server/tasks/notifications.py new file mode 100644 index 0000000000..67417bd62d --- /dev/null +++ b/src/fastmcp/server/tasks/notifications.py @@ -0,0 +1,300 @@ +"""Distributed notification queue for background task events (SEP-1686). + +Enables distributed Docket workers to send MCP notifications to clients +without holding session references. Workers push to a Redis queue, +the MCP server process subscribes and forwards to the client's session. + +Pattern: Fire-and-forward with retry +- One queue per session_id +- LPUSH/BRPOP for reliable ordered delivery +- Retry up to 3 times on delivery failure, then discard +- TTL-based expiration for stale messages + +Note: Docket's execution.subscribe() handles task state/progress events via +Redis Pub/Sub. This module handles elicitation-specific notifications that +require reliable delivery (input_required prompts, cancel signals). 
+""" + +from __future__ import annotations + +import asyncio +import json +import logging +import weakref +from contextlib import suppress +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any, cast + +import mcp.types + +if TYPE_CHECKING: + from docket import Docket + from mcp.server.session import ServerSession + + from fastmcp.server.server import FastMCP + +logger = logging.getLogger(__name__) + +# Redis key patterns +NOTIFICATION_QUEUE_KEY = "fastmcp:notifications:{session_id}" +NOTIFICATION_ACTIVE_KEY = "fastmcp:notifications:{session_id}:active" + +# Configuration +NOTIFICATION_TTL_SECONDS = 300 # 5 minute message TTL (elicitation response window) +MAX_DELIVERY_ATTEMPTS = 3 # Retry failed deliveries before discarding +SUBSCRIBER_TIMEOUT_SECONDS = 30 # BRPOP timeout (also heartbeat interval) + + +async def push_notification( + session_id: str, + notification: dict[str, Any], + docket: Docket, +) -> None: + """Push notification to session's queue (called from Docket worker). + + Used for elicitation-specific notifications (input_required, cancel) + that need reliable delivery across distributed processes. + + Args: + session_id: Target session's identifier + notification: MCP notification dict (method, params, _meta) + docket: Docket instance for Redis access + """ + key = docket.key(NOTIFICATION_QUEUE_KEY.format(session_id=session_id)) + message = json.dumps( + { + "notification": notification, + "attempt": 0, + "enqueued_at": datetime.now(timezone.utc).isoformat(), + } + ) + async with docket.redis() as redis: + await redis.lpush(key, message) # type: ignore[invalid-await] # redis-py union type (sync/async) + await redis.expire(key, NOTIFICATION_TTL_SECONDS) + + +async def notification_subscriber_loop( + session_id: str, + session: ServerSession, + docket: Docket, + fastmcp: FastMCP, +) -> None: + """Subscribe to notification queue and forward to session. + + Runs in the MCP server process. Bridges distributed workers to clients. 
+ + This loop: + 1. Maintains a heartbeat (active subscriber marker for debugging) + 2. Blocks on BRPOP waiting for notifications + 3. Forwards notifications to the client's session + 4. Retries failed deliveries, then discards (no dead-letter queue) + + Args: + session_id: Session identifier to subscribe to + session: MCP ServerSession for sending notifications + docket: Docket instance for Redis access + fastmcp: FastMCP server instance (for elicitation relay) + """ + queue_key = docket.key(NOTIFICATION_QUEUE_KEY.format(session_id=session_id)) + active_key = docket.key(NOTIFICATION_ACTIVE_KEY.format(session_id=session_id)) + + logger.debug("Starting notification subscriber for session %s", session_id) + + while True: + try: + async with docket.redis() as redis: + # Heartbeat: mark subscriber as active (for distributed debugging) + await redis.set(active_key, "1", ex=SUBSCRIBER_TIMEOUT_SECONDS * 2) + + # Blocking wait for notification (timeout refreshes heartbeat) + # Using BRPOP (right pop) for FIFO order with LPUSH (left push) + result = await cast( + Any, redis.brpop([queue_key], timeout=SUBSCRIBER_TIMEOUT_SECONDS) + ) + if not result: + continue # Timeout - refresh heartbeat and retry + + _, message_bytes = result + message = json.loads(message_bytes) + notification_dict = message["notification"] + attempt = message.get("attempt", 0) + + try: + # Reconstruct and send MCP notification + await _send_mcp_notification( + session, notification_dict, session_id, docket, fastmcp + ) + logger.debug( + "Delivered notification to session %s (attempt %d)", + session_id, + attempt + 1, + ) + except Exception as send_error: + # Delivery failed - retry or discard + if attempt < MAX_DELIVERY_ATTEMPTS - 1: + # Re-queue with incremented attempt (back of queue) + message["attempt"] = attempt + 1 + message["last_error"] = str(send_error) + await redis.lpush(queue_key, json.dumps(message)) # type: ignore[invalid-await] + logger.debug( + "Requeued notification for session %s 
(attempt %d): %s", + session_id, + attempt + 2, + send_error, + ) + else: + # Discard after max attempts (session likely disconnected) + logger.warning( + "Discarding notification for session %s after %d attempts: %s", + session_id, + MAX_DELIVERY_ATTEMPTS, + send_error, + ) + + except asyncio.CancelledError: + # Graceful shutdown - leave pending messages in queue for reconnect + logger.debug("Notification subscriber cancelled for session %s", session_id) + break + except Exception as e: + logger.debug( + "Notification subscriber error for session %s: %s", session_id, e + ) + await asyncio.sleep(1) # Backoff on error + + +async def _send_mcp_notification( + session: ServerSession, + notification_dict: dict[str, Any], + session_id: str, + docket: Docket, + fastmcp: FastMCP, +) -> None: + """Reconstruct MCP notification from dict and send to session. + + For input_required notifications with elicitation metadata, also sends + a standard elicitation/create request to the client and relays the + response back to the worker via Redis. 
+ + Args: + session: MCP ServerSession + notification_dict: Notification as dict (method, params, _meta) + session_id: Session identifier (for elicitation relay) + docket: Docket instance (for notification delivery) + fastmcp: FastMCP server instance (for elicitation relay) + """ + method = notification_dict.get("method", "notifications/tasks/status") + if method != "notifications/tasks/status": + raise ValueError(f"Unsupported notification method for subscriber: {method}") + + notification = mcp.types.TaskStatusNotification.model_validate( + { + "method": "notifications/tasks/status", + "params": notification_dict.get("params", {}), + "_meta": notification_dict.get("_meta"), + } + ) + server_notification = mcp.types.ServerNotification(notification) + + await session.send_notification(server_notification) + + # If this is an input_required notification with elicitation metadata, + # relay the elicitation to the client via standard elicitation/create + params = notification_dict.get("params", {}) + if params.get("status") == "input_required": + meta = notification_dict.get("_meta", {}) + related_task = meta.get("io.modelcontextprotocol/related-task", {}) + elicitation = related_task.get("elicitation") + if elicitation: + task_id = params.get("taskId") + if not task_id: + logger.warning( + "input_required notification missing taskId, skipping relay" + ) + return + from fastmcp.server.tasks.elicitation import relay_elicitation + + task = asyncio.create_task( + relay_elicitation(session, session_id, task_id, elicitation, fastmcp), + name=f"elicitation-relay-{task_id[:8]}", + ) + _background_tasks.add(task) + task.add_done_callback(_background_tasks.discard) + + +# ============================================================================= +# Subscriber Management +# ============================================================================= + +# Strong references to fire-and-forget relay tasks (prevent GC mid-flight) +_background_tasks: set[asyncio.Task[None]] = 
set() + +# Registry of active subscribers per session (prevents duplicates) +# Uses weakref to session to detect disconnects +_active_subscribers: dict[ + str, tuple[asyncio.Task[None], weakref.ref[ServerSession]] +] = {} + + +async def ensure_subscriber_running( + session_id: str, + session: ServerSession, + docket: Docket, + fastmcp: FastMCP, +) -> None: + """Start notification subscriber if not already running (idempotent). + + Subscriber is created on first task submission and cleaned up on disconnect. + Safe to call multiple times for the same session. + + Args: + session_id: Session identifier + session: MCP ServerSession + docket: Docket instance + fastmcp: FastMCP server instance (for elicitation relay) + """ + # Check if subscriber already running for this session + if session_id in _active_subscribers: + task, session_ref = _active_subscribers[session_id] + # Check if task is still running AND session is still alive + if not task.done() and session_ref() is not None: + return # Already running + + # Task finished or session dead - clean up + if not task.done(): + task.cancel() + with suppress(asyncio.CancelledError): + await task + del _active_subscribers[session_id] + + # Start new subscriber task + task = asyncio.create_task( + notification_subscriber_loop(session_id, session, docket, fastmcp), + name=f"notification-subscriber-{session_id[:8]}", + ) + _active_subscribers[session_id] = (task, weakref.ref(session)) + logger.debug("Started notification subscriber for session %s", session_id) + + +async def stop_subscriber(session_id: str) -> None: + """Stop notification subscriber for a session. + + Called when session disconnects. Pending messages remain in queue + for delivery if client reconnects (with TTL expiration). 
+ + Args: + session_id: Session identifier + """ + if session_id not in _active_subscribers: + return + + task, _ = _active_subscribers.pop(session_id) + if not task.done(): + task.cancel() + with suppress(asyncio.CancelledError): + await task + logger.debug("Stopped notification subscriber for session %s", session_id) + + +def get_subscriber_count() -> int: + """Get number of active subscribers (for monitoring).""" + return len(_active_subscribers) diff --git a/src/fastmcp/server/tasks/requests.py b/src/fastmcp/server/tasks/requests.py index 61286d8316..fae63c08d3 100644 --- a/src/fastmcp/server/tasks/requests.py +++ b/src/fastmcp/server/tasks/requests.py @@ -300,7 +300,7 @@ async def tasks_result_handler(server: FastMCP, params: dict[str, Any]) -> Any: content=[mcp.types.TextContent(type="text", text=str(error))], isError=True, _meta={ # type: ignore[call-arg] # _meta is Pydantic alias for meta field - "modelcontextprotocol.io/related-task": { + "io.modelcontextprotocol/related-task": { "taskId": client_task_id, } }, @@ -342,7 +342,7 @@ async def tasks_result_handler(server: FastMCP, params: dict[str, Any]) -> Any: # Build related-task metadata related_task_meta = { - "modelcontextprotocol.io/related-task": { + "io.modelcontextprotocol/related-task": { "taskId": client_task_id, } } diff --git a/src/fastmcp/server/transforms/visibility.py b/src/fastmcp/server/transforms/visibility.py index 41061a6510..5a35888866 100644 --- a/src/fastmcp/server/transforms/visibility.py +++ b/src/fastmcp/server/transforms/visibility.py @@ -171,23 +171,23 @@ def _matches(self, component: FastMCPComponent) -> bool: return self.tags is None or bool(component.tags & self.tags) def _mark_component(self, component: T) -> T: - """Set visibility state in component metadata if rule matches.""" + """Set visibility state in component metadata if rule matches. + + Returns a copy of the component with updated metadata to avoid + mutating shared objects cached in providers. 
+ """ if not self._matches(component): return component - # Create new dicts to avoid mutating shared dicts - # (e.g., when Tool.from_tool shares the meta dict between tools) if component.meta is None: - component.meta = { - _FASTMCP_KEY: {_INTERNAL_KEY: {"visibility": self._enabled}} - } + new_meta = {_FASTMCP_KEY: {_INTERNAL_KEY: {"visibility": self._enabled}}} else: old_fastmcp = component.meta.get(_FASTMCP_KEY, {}) old_internal = old_fastmcp.get(_INTERNAL_KEY, {}) new_internal = {**old_internal, "visibility": self._enabled} new_fastmcp = {**old_fastmcp, _INTERNAL_KEY: new_internal} - component.meta = {**component.meta, _FASTMCP_KEY: new_fastmcp} - return component + new_meta = {**component.meta, _FASTMCP_KEY: new_fastmcp} + return component.model_copy(update={"meta": new_meta}) # ------------------------------------------------------------------------- # Transform methods (mark components, don't filter) diff --git a/src/fastmcp/settings.py b/src/fastmcp/settings.py index 0257e8d3bd..561a80437b 100644 --- a/src/fastmcp/settings.py +++ b/src/fastmcp/settings.py @@ -2,7 +2,6 @@ import inspect import os -import warnings from datetime import timedelta from pathlib import Path from typing import Annotated, Any, Literal @@ -115,30 +114,6 @@ class DocketSettings(BaseSettings): ] = timedelta(seconds=5) -class ExperimentalSettings(BaseSettings): - model_config = SettingsConfigDict( - env_prefix="FASTMCP_EXPERIMENTAL_", - extra="ignore", - validate_assignment=True, - ) - - # Deprecated in 2.14 - the new OpenAPI parser is now the default and only parser - enable_new_openapi_parser: bool = False - - @field_validator("enable_new_openapi_parser", mode="after") - @classmethod - def _warn_openapi_parser_deprecated(cls, v: bool) -> bool: - if v: - warnings.warn( - "enable_new_openapi_parser is deprecated. " - "The new OpenAPI parser is now the default (and only) parser. 
" - "You can remove this setting.", - DeprecationWarning, - stacklevel=2, - ) - return v - - class Settings(BaseSettings): """FastMCP settings.""" @@ -191,8 +166,6 @@ def normalize_log_level(cls, v): return v.upper() return v - experimental: ExperimentalSettings = ExperimentalSettings() - docket: DocketSettings = DocketSettings() enable_rich_logging: Annotated[ diff --git a/src/fastmcp/tools/function_parsing.py b/src/fastmcp/tools/function_parsing.py index a2af143d13..0c9e8718ad 100644 --- a/src/fastmcp/tools/function_parsing.py +++ b/src/fastmcp/tools/function_parsing.py @@ -28,8 +28,8 @@ ) try: - from prefab_ui import UIResponse as _PrefabUIResponse from prefab_ui.components.base import Component as _PrefabComponent + from prefab_ui.response import UIResponse as _PrefabUIResponse _PREFAB_TYPES: tuple[type, ...] = (_PrefabUIResponse, _PrefabComponent) except ImportError: diff --git a/src/fastmcp/tools/function_tool.py b/src/fastmcp/tools/function_tool.py index d8f46faa0e..e88828a80a 100644 --- a/src/fastmcp/tools/function_tool.py +++ b/src/fastmcp/tools/function_tool.py @@ -26,11 +26,11 @@ import fastmcp from fastmcp.decorators import resolve_task_config +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.dependencies import without_injected_parameters from fastmcp.server.tasks.config import TaskConfig from fastmcp.tools.function_parsing import ParsedFunction, _is_object_schema from fastmcp.tools.tool import ( - AuthCheckCallable, Tool, ToolResult, ToolResultSerializerType, @@ -80,7 +80,7 @@ class ToolMeta: exclude_args: list[str] | None = None serializer: Any | None = None timeout: float | None = None - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None + auth: AuthCheck | list[AuthCheck] | None = None enabled: bool = True @@ -126,7 +126,7 @@ def from_function( meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = 
None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionTool: """Create a FunctionTool from a function. @@ -349,7 +349,7 @@ def tool( exclude_args: list[str] | None = None, serializer: Any | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[F], F]: ... @overload def tool( @@ -368,7 +368,7 @@ def tool( exclude_args: list[str] | None = None, serializer: Any | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Callable[[F], F]: ... @@ -388,7 +388,7 @@ def tool( exclude_args: list[str] | None = None, serializer: Any | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> Any: """Standalone decorator to mark a function as an MCP tool. 
diff --git a/src/fastmcp/tools/tool.py b/src/fastmcp/tools/tool.py index 0350d20fc0..2f5aa57b01 100644 --- a/src/fastmcp/tools/tool.py +++ b/src/fastmcp/tools/tool.py @@ -26,6 +26,7 @@ from pydantic import BaseModel, Field, model_validator from pydantic.json_schema import SkipJsonSchema +from fastmcp.server.auth.authorization import AuthCheck from fastmcp.server.tasks.config import TaskConfig, TaskMeta from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.logging import get_logger @@ -38,8 +39,8 @@ ) try: - from prefab_ui import UIResponse as _PrefabUIResponse from prefab_ui.components.base import Component as _PrefabComponent + from prefab_ui.response import UIResponse as _PrefabUIResponse _HAS_PREFAB = True except ImportError: @@ -155,7 +156,7 @@ class Tool(FastMCPComponent): ), ] = None auth: Annotated[ - SkipJsonSchema[AuthCheckCallable | list[AuthCheckCallable] | None], + SkipJsonSchema[AuthCheck | list[AuthCheck] | None], Field(description="Authorization checks for this tool", exclude=True), ] = None timeout: Annotated[ @@ -215,7 +216,7 @@ def from_function( meta: dict[str, Any] | None = None, task: bool | TaskConfig | None = None, timeout: float | None = None, - auth: AuthCheckCallable | list[AuthCheckCallable] | None = None, + auth: AuthCheck | list[AuthCheck] | None = None, ) -> FunctionTool: """Create a Tool from a function.""" from fastmcp.tools.function_tool import FunctionTool diff --git a/src/fastmcp/utilities/json_schema.py b/src/fastmcp/utilities/json_schema.py index f302e1cd11..da713f802d 100644 --- a/src/fastmcp/utilities/json_schema.py +++ b/src/fastmcp/utilities/json_schema.py @@ -366,15 +366,11 @@ def compress_schema( prune_params: list[str] | None = None, prune_additional_properties: bool = False, prune_titles: bool = False, + dereference: bool = False, ) -> dict[str, Any]: """ Compress and optimize a JSON schema for MCP compatibility. 
- This function dereferences all $ref entries (inlining definitions) to ensure - compatibility with MCP clients that don't properly handle $ref in schemas - (e.g., VS Code Copilot). It also applies various optimizations to reduce - schema size. - Args: schema: The schema to compress prune_params: List of parameter names to remove from properties @@ -382,22 +378,27 @@ def compress_schema( Defaults to False to maintain MCP client compatibility, as some clients (e.g., Claude) require additionalProperties: false for strict validation. prune_titles: Whether to remove title fields from the schema + dereference: Whether to dereference $ref by inlining definitions. + Defaults to False; dereferencing is typically handled by + middleware at serve-time instead. """ - # Dereference $ref - this inlines all definitions and removes $defs - # Required for MCP client compatibility - schema = dereference_refs(schema) + if dereference: + schema = dereference_refs(schema) + + # Resolve root-level $ref for MCP spec compliance (requires type: object at root) + schema = resolve_root_ref(schema) # Remove specific parameters if requested for param in prune_params or []: schema = _prune_param(schema, param=param) - # Apply combined optimizations in a single tree traversal - if prune_titles or prune_additional_properties: - schema = _single_pass_optimize( - schema, - prune_titles=prune_titles, - prune_additional_properties=prune_additional_properties, - prune_defs=False, - ) + # Apply combined optimizations in a single tree traversal. + # Always prune unused $defs to keep schemas clean after parameter removal. 
+ schema = _single_pass_optimize( + schema, + prune_titles=prune_titles, + prune_additional_properties=prune_additional_properties, + prune_defs=True, + ) return schema diff --git a/src/fastmcp/utilities/openapi/__init__.py b/src/fastmcp/utilities/openapi/__init__.py index f71bc7a6a7..eb25666d1d 100644 --- a/src/fastmcp/utilities/openapi/__init__.py +++ b/src/fastmcp/utilities/openapi/__init__.py @@ -20,7 +20,6 @@ format_deep_object_parameter, format_description_with_responses, format_json_for_description, - format_simple_description, generate_example_from_schema, ) @@ -57,7 +56,6 @@ "format_deep_object_parameter", "format_description_with_responses", "format_json_for_description", - "format_simple_description", "generate_example_from_schema", "parse_openapi_to_http_routes", ] diff --git a/src/fastmcp/utilities/openapi/director.py b/src/fastmcp/utilities/openapi/director.py index 2efc8e74cd..58e941ba7e 100644 --- a/src/fastmcp/utilities/openapi/director.py +++ b/src/fastmcp/utilities/openapi/director.py @@ -166,12 +166,18 @@ def _unflatten_arguments( body = None if body_props: # If we have body properties, construct the body object - if route.request_body and route.request_body.content_schema: - # Check if the request body expects an object with properties + if ( + route.request_body + and route.request_body.content_schema + and len(route.request_body.content_schema) > 0 + ): content_type = next(iter(route.request_body.content_schema)) body_schema = route.request_body.content_schema[content_type] - if body_schema.get("type") == "object": + if ( + isinstance(body_schema, dict) + and body_schema.get("type") == "object" + ): body = body_props elif len(body_props) == 1: # If body schema is not an object and we have exactly one property, diff --git a/src/fastmcp/utilities/openapi/formatters.py b/src/fastmcp/utilities/openapi/formatters.py index 27580fcdd5..a0bd75bef2 100644 --- a/src/fastmcp/utilities/openapi/formatters.py +++ 
b/src/fastmcp/utilities/openapi/formatters.py @@ -189,39 +189,6 @@ def format_json_for_description(data: Any, indent: int = 2) -> str: return f"```\nCould not serialize to JSON: {data}\n```" -def format_simple_description( - base_description: str, - parameters: list[ParameterInfo] | None = None, - request_body: RequestBodyInfo | None = None, -) -> str: - """ - Formats a simple description for MCP objects (tools, resources, prompts). - Excludes response details, examples, and verbose status codes. - - Args: - base_description (str): The initial description to be formatted. - parameters (list[ParameterInfo] | None, optional): A list of parameter information. - request_body (RequestBodyInfo | None, optional): Information about the request body. - - Returns: - str: The formatted description string with minimal details. - """ - desc_parts = [base_description] - - # Only add critical parameter information if they have descriptions - if parameters: - path_params = [p for p in parameters if p.location == "path" and p.description] - if path_params: - desc_parts.append("\n\n**Path Parameters:**") - for param in path_params: - desc_parts.append(f"\n- **{param.name}**: {param.description}") - - # Skip query parameters, request body details, and all response information - # These are already captured in the inputSchema - - return "\n".join(desc_parts) - - def format_description_with_responses( base_description: str, responses: dict[ @@ -384,6 +351,5 @@ def format_description_with_responses( "format_deep_object_parameter", "format_description_with_responses", "format_json_for_description", - "format_simple_description", "generate_example_from_schema", ] diff --git a/tests/client/auth/test_oauth_static_client.py b/tests/client/auth/test_oauth_static_client.py new file mode 100644 index 0000000000..c9f17cdbe3 --- /dev/null +++ b/tests/client/auth/test_oauth_static_client.py @@ -0,0 +1,274 @@ +"""Tests for OAuth static client registration (pre-registered 
client_id/client_secret).""" + +from unittest.mock import patch + +import httpx +import pytest +from mcp.shared.auth import OAuthClientInformationFull +from pydantic import AnyUrl + +from fastmcp.client import Client +from fastmcp.client.auth import OAuth +from fastmcp.client.auth.oauth import ClientNotFoundError +from fastmcp.client.transports import StreamableHttpTransport +from fastmcp.server.auth.auth import ClientRegistrationOptions +from fastmcp.server.auth.providers.in_memory import InMemoryOAuthProvider +from fastmcp.server.server import FastMCP +from fastmcp.utilities.http import find_available_port +from fastmcp.utilities.tests import HeadlessOAuth, run_server_async + + +class TestStaticClientInfoConstruction: + """Static client info should include full metadata from client_metadata.""" + + def test_static_client_info_includes_metadata(self): + """Static client info should include redirect_uris, grant_types, etc.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="my-client-id", + client_secret="my-secret", + scopes=["read", "write"], + ) + + info = oauth._static_client_info + assert info is not None + assert info.client_id == "my-client-id" + assert info.client_secret == "my-secret" + # Metadata fields should be populated from client_metadata + assert info.redirect_uris is not None + assert len(info.redirect_uris) == 1 + assert info.grant_types is not None + assert "authorization_code" in info.grant_types + assert "refresh_token" in info.grant_types + assert info.response_types is not None + assert "code" in info.response_types + assert info.scope == "read write" + assert info.token_endpoint_auth_method == "client_secret_post" + + def test_static_client_info_without_secret(self): + """Public clients can provide client_id without client_secret.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="public-client", + ) + + info = oauth._static_client_info + assert info is not None + assert info.client_id == "public-client" 
+ assert info.client_secret is None + assert info.token_endpoint_auth_method == "none" + # Metadata should still be present + assert info.redirect_uris is not None + assert info.grant_types is not None + + def test_no_static_client_info_without_client_id(self): + """When no client_id is provided, _static_client_info should be None.""" + oauth = OAuth(mcp_url="https://example.com/mcp") + assert oauth._static_client_info is None + + def test_static_client_info_includes_additional_metadata(self): + """Additional client metadata should be included in static client info.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="my-client", + additional_client_metadata={ + "token_endpoint_auth_method": "client_secret_post" + }, + ) + + info = oauth._static_client_info + assert info is not None + assert info.token_endpoint_auth_method == "client_secret_post" + + +class TestStaticClientInitialize: + """_initialize should set context.client_info and persist to storage.""" + + async def test_initialize_sets_context_client_info(self): + """_initialize should inject static client info into the auth context.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="my-client", + client_secret="my-secret", + ) + + # Mock the parent _initialize since it needs a real server + with patch.object(OAuth.__bases__[0], "_initialize", return_value=None): + await oauth._initialize() + + assert oauth.context.client_info is not None + assert oauth.context.client_info.client_id == "my-client" + assert oauth.context.client_info.client_secret == "my-secret" + + async def test_initialize_persists_static_client_to_storage(self): + """Static client info should be persisted to token storage.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="my-client", + client_secret="my-secret", + ) + + with patch.object(OAuth.__bases__[0], "_initialize", return_value=None): + await oauth._initialize() + + # Verify it was persisted to storage + stored = await 
oauth.token_storage_adapter.get_client_info() + assert stored is not None + assert stored.client_id == "my-client" + + async def test_initialize_without_static_creds_works(self): + """_initialize should not error when no static credentials are provided.""" + oauth = OAuth(mcp_url="https://example.com/mcp") + + with patch.object(OAuth.__bases__[0], "_initialize", return_value=None): + # This should not raise AttributeError + await oauth._initialize() + + # context.client_info should be whatever the parent set (None by default) + + +class TestStaticClientRetryBehavior: + """Retry-on-stale-credentials should short-circuit for static creds.""" + + async def test_retry_skipped_with_static_creds(self): + """When static creds are rejected, should raise immediately, not retry.""" + oauth = OAuth( + mcp_url="https://example.com/mcp", + client_id="bad-client-id", + client_secret="bad-secret", + ) + + # Make the parent auth flow raise ClientNotFoundError + async def failing_auth_flow(request): + raise ClientNotFoundError("client not found") + yield # make it a generator # noqa: E275 + + with patch.object( + OAuth.__bases__[0], "async_auth_flow", side_effect=failing_auth_flow + ): + flow = oauth.async_auth_flow(httpx.Request("GET", "https://example.com")) + with pytest.raises(ClientNotFoundError, match="static client credentials"): + await flow.__anext__() + + async def test_retry_still_works_without_static_creds(self): + """Without static creds, the retry behavior should be preserved.""" + oauth = OAuth(mcp_url="https://example.com/mcp") + + call_count = 0 + + async def auth_flow_with_retry(request): + nonlocal call_count + call_count += 1 + if call_count == 1: + raise ClientNotFoundError("client not found") + # Second attempt succeeds + yield httpx.Request("GET", "https://example.com") + + with patch.object( + OAuth.__bases__[0], "async_auth_flow", side_effect=auth_flow_with_retry + ): + flow = oauth.async_auth_flow(httpx.Request("GET", "https://example.com")) + request = 
await flow.__anext__() + assert request is not None + assert call_count == 2 + + +class TestStaticClientE2E: + """End-to-end tests with a real OAuth server using pre-registered clients.""" + + async def test_static_client_with_dcr_disabled(self): + """Static client_id should work when the server has DCR disabled.""" + port = find_available_port() + callback_port = find_available_port() + issuer_url = f"http://127.0.0.1:{port}" + + provider = InMemoryOAuthProvider( + base_url=issuer_url, + client_registration_options=ClientRegistrationOptions( + enabled=False, # DCR disabled + valid_scopes=["read", "write"], + ), + ) + + server = FastMCP("TestServer", auth=provider) + + @server.tool + def greet(name: str) -> str: + return f"Hello, {name}!" + + # Pre-register a client directly in the provider. + # The redirect_uri must match what the OAuth client will use. + pre_registered = OAuthClientInformationFull( + client_id="pre-registered-client", + client_secret="pre-registered-secret", + redirect_uris=[AnyUrl(f"http://localhost:{callback_port}/callback")], + grant_types=["authorization_code", "refresh_token"], + response_types=["code"], + token_endpoint_auth_method="client_secret_post", + scope="read write", + ) + await provider.register_client(pre_registered) + + async with run_server_async(server, port=port, transport="http") as url: + oauth = HeadlessOAuth( + mcp_url=url, + client_id="pre-registered-client", + client_secret="pre-registered-secret", + scopes=["read", "write"], + callback_port=callback_port, + ) + + async with Client( + transport=StreamableHttpTransport(url), + auth=oauth, + ) as client: + assert await client.ping() + tools = await client.list_tools() + assert any(t.name == "greet" for t in tools) + + async def test_static_client_with_dcr_enabled(self): + """Static client_id should also work when DCR is enabled (skips DCR).""" + port = find_available_port() + callback_port = find_available_port() + issuer_url = f"http://127.0.0.1:{port}" + + provider = 
InMemoryOAuthProvider( + base_url=issuer_url, + client_registration_options=ClientRegistrationOptions( + enabled=True, + valid_scopes=["read"], + ), + ) + + server = FastMCP("TestServer", auth=provider) + + @server.tool + def add(a: int, b: int) -> int: + return a + b + + pre_registered = OAuthClientInformationFull( + client_id="my-app", + client_secret="my-secret", + redirect_uris=[AnyUrl(f"http://localhost:{callback_port}/callback")], + grant_types=["authorization_code", "refresh_token"], + response_types=["code"], + token_endpoint_auth_method="client_secret_post", + scope="read", + ) + await provider.register_client(pre_registered) + + async with run_server_async(server, port=port, transport="http") as url: + oauth = HeadlessOAuth( + mcp_url=url, + client_id="my-app", + client_secret="my-secret", + scopes=["read"], + callback_port=callback_port, + ) + + async with Client( + transport=StreamableHttpTransport(url), + auth=oauth, + ) as client: + result = await client.call_tool("add", {"a": 3, "b": 4}) + assert result.data == 7 diff --git a/tests/client/test_sampling.py b/tests/client/test_sampling.py index e379d6707d..e5a45adc76 100644 --- a/tests/client/test_sampling.py +++ b/tests/client/test_sampling.py @@ -563,6 +563,492 @@ async def test_exception(context: Context) -> str: assert "Tool failed intentionally" in error_text assert result.data == "Handled error" + async def test_concurrent_tool_execution_default_sequential(self): + """Test that tools execute sequentially by default.""" + import asyncio + import time + + from mcp.types import CreateMessageResultWithTools, ToolUseContent + + execution_order: list[tuple[str, float]] = [] + + async def slow_tool_a(x: int) -> int: + """Slow tool A.""" + start = time.time() + execution_order.append(("tool_a_start", start)) + await asyncio.sleep(0.1) + execution_order.append(("tool_a_end", time.time())) + return x * 2 + + async def slow_tool_b(y: int) -> int: + """Slow tool B.""" + start = time.time() + 
execution_order.append(("tool_b_start", start)) + await asyncio.sleep(0.1) + execution_order.append(("tool_b_end", time.time())) + return y + 10 + + call_count = 0 + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + nonlocal call_count + call_count += 1 + + if call_count == 1: + return CreateMessageResultWithTools( + role="assistant", + content=[ + ToolUseContent( + type="tool_use", + id="call_a", + name="slow_tool_a", + input={"x": 5}, + ), + ToolUseContent( + type="tool_use", + id="call_b", + name="slow_tool_b", + input={"y": 3}, + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Done!")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + result = await context.sample( + messages="Run tools", + tools=[slow_tool_a, slow_tool_b], + # Default: tool_concurrency=None (sequential) + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert result.data == "Done!" 
+ # Verify sequential execution: tool_a must complete before tool_b starts + events = [e[0] for e in execution_order] + assert events == ["tool_a_start", "tool_a_end", "tool_b_start", "tool_b_end"] + + async def test_concurrent_tool_execution_unlimited(self): + """Test unlimited parallel tool execution with tool_concurrency=0.""" + import asyncio + import time + + from mcp.types import CreateMessageResultWithTools, ToolUseContent + + execution_times: dict[str, dict[str, float]] = {} + + async def slow_tool_a(x: int) -> int: + """Slow tool A.""" + execution_times["tool_a"] = {"start": time.time()} + await asyncio.sleep(0.1) + execution_times["tool_a"]["end"] = time.time() + return x * 2 + + async def slow_tool_b(y: int) -> int: + """Slow tool B.""" + execution_times["tool_b"] = {"start": time.time()} + await asyncio.sleep(0.1) + execution_times["tool_b"]["end"] = time.time() + return y + 10 + + call_count = 0 + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + nonlocal call_count + call_count += 1 + + if call_count == 1: + return CreateMessageResultWithTools( + role="assistant", + content=[ + ToolUseContent( + type="tool_use", + id="call_a", + name="slow_tool_a", + input={"x": 5}, + ), + ToolUseContent( + type="tool_use", + id="call_b", + name="slow_tool_b", + input={"y": 3}, + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Done!")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + result = await context.sample( + messages="Run tools", + tools=[slow_tool_a, slow_tool_b], + tool_concurrency=0, # Unlimited parallel + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert 
result.data == "Done!" + # Verify parallel execution: both tools should overlap in time + assert "tool_a" in execution_times + assert "tool_b" in execution_times + # tool_b should start before tool_a finishes (overlap) + assert execution_times["tool_b"]["start"] < execution_times["tool_a"]["end"] + + async def test_concurrent_tool_execution_bounded(self): + """Test bounded parallel execution with tool_concurrency=2.""" + import asyncio + import time + + from mcp.types import CreateMessageResultWithTools, ToolUseContent + + execution_order: list[tuple[str, float]] = [] + + async def slow_tool(name: str, duration: float = 0.1) -> str: + """Generic slow tool.""" + execution_order.append((f"{name}_start", time.time())) + await asyncio.sleep(duration) + execution_order.append((f"{name}_end", time.time())) + return f"{name} done" + + call_count = 0 + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + nonlocal call_count + call_count += 1 + + if call_count == 1: + # Request 3 tools (with concurrency=2, first 2 run parallel, then 3rd) + return CreateMessageResultWithTools( + role="assistant", + content=[ + ToolUseContent( + type="tool_use", + id="call_1", + name="slow_tool", + input={"name": "tool_1", "duration": 0.1}, + ), + ToolUseContent( + type="tool_use", + id="call_2", + name="slow_tool", + input={"name": "tool_2", "duration": 0.1}, + ), + ToolUseContent( + type="tool_use", + id="call_3", + name="slow_tool", + input={"name": "tool_3", "duration": 0.05}, + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Done!")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + result = await context.sample( + messages="Run tools", + tools=[slow_tool], + 
tool_concurrency=2, # Max 2 concurrent + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert result.data == "Done!" + # Verify that at most 2 tools run concurrently + events = [e[0] for e in execution_order] + # First 2 tools should start before either ends + assert events[0] in ["tool_1_start", "tool_2_start"] + assert events[1] in ["tool_1_start", "tool_2_start"] + # Third tool should start after at least one of the first two finishes + tool_3_start_idx = events.index("tool_3_start") + assert ( + "tool_1_end" in events[:tool_3_start_idx] + or "tool_2_end" in events[:tool_3_start_idx] + ) + + async def test_sequential_tool_forces_sequential_execution(self): + """Test that sequential=True forces all tools to execute sequentially.""" + import asyncio + import time + + from mcp.types import CreateMessageResultWithTools, ToolUseContent + + execution_order: list[tuple[str, float]] = [] + + async def normal_tool(x: int) -> int: + """Normal tool.""" + execution_order.append(("normal_start", time.time())) + await asyncio.sleep(0.05) + execution_order.append(("normal_end", time.time())) + return x * 2 + + async def sequential_tool(y: int) -> int: + """Sequential tool.""" + execution_order.append(("sequential_start", time.time())) + await asyncio.sleep(0.05) + execution_order.append(("sequential_end", time.time())) + return y + 10 + + call_count = 0 + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + nonlocal call_count + call_count += 1 + + if call_count == 1: + return CreateMessageResultWithTools( + role="assistant", + content=[ + ToolUseContent( + type="tool_use", + id="call_1", + name="normal_tool", + input={"x": 5}, + ), + ToolUseContent( + type="tool_use", + id="call_2", + name="sequential_tool", + input={"y": 3}, + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return 
CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Done!")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + # Create tools with sequential=True for one of them + normal = SamplingTool.from_function(normal_tool, sequential=False) + sequential = SamplingTool.from_function(sequential_tool, sequential=True) + + result = await context.sample( + messages="Run tools", + tools=[normal, sequential], + tool_concurrency=0, # Request unlimited, but sequential tool forces sequential + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert result.data == "Done!" + # Verify sequential execution: first tool must complete before second starts + events = [e[0] for e in execution_order] + assert events[0] in ["normal_start", "sequential_start"] + assert events[1] in ["normal_end", "sequential_end"] + # Ensure the second tool starts after the first ends + if events[0] == "normal_start": + assert events[1] == "normal_end" + assert events[2] == "sequential_start" + else: + assert events[1] == "sequential_end" + assert events[2] == "normal_start" + + async def test_concurrent_tool_execution_error_handling(self): + """Test that errors are captured per-tool in parallel execution.""" + from mcp.types import ( + CreateMessageResultWithTools, + ToolResultContent, + ToolUseContent, + ) + + def good_tool() -> str: + return "success" + + def bad_tool() -> str: + raise ValueError("Tool error") + + messages_received: list[list[SamplingMessage]] = [] + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + messages_received.append(list(messages)) + + if len(messages_received) == 1: + return CreateMessageResultWithTools( + role="assistant", + content=[ + ToolUseContent( + 
type="tool_use", id="call_1", name="good_tool", input={} + ), + ToolUseContent( + type="tool_use", id="call_2", name="bad_tool", input={} + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Handled errors")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + result = await context.sample( + messages="Run tools", + tools=[good_tool, bad_tool], + tool_concurrency=0, # Parallel execution + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert result.data == "Handled errors" + # Check that tool results include both success and error + tool_result_message = messages_received[1][-1] + assert tool_result_message.role == "user" + tool_results = cast(list[ToolResultContent], tool_result_message.content) + assert len(tool_results) == 2 + # One should be success, one should be error + assert any(not r.isError for r in tool_results) + assert any(r.isError for r in tool_results) + + async def test_concurrent_tool_result_order_preserved(self): + """Test that tool results maintain the same order as tool calls.""" + import asyncio + + from mcp.types import ( + CreateMessageResultWithTools, + ToolResultContent, + ToolUseContent, + ) + + async def tool_with_delay(value: int, delay: float) -> int: + """Tool that takes variable time.""" + await asyncio.sleep(delay) + return value + + messages_received: list[list[SamplingMessage]] = [] + + def sampling_handler( + messages: list[SamplingMessage], params: SamplingParams, ctx: RequestContext + ) -> CreateMessageResultWithTools: + messages_received.append(list(messages)) + + if len(messages_received) == 1: + # Tools with different delays - later tools finish first + return CreateMessageResultWithTools( + role="assistant", + content=[ + 
ToolUseContent( + type="tool_use", + id="call_1", + name="tool_with_delay", + input={"value": 1, "delay": 0.15}, + ), + ToolUseContent( + type="tool_use", + id="call_2", + name="tool_with_delay", + input={"value": 2, "delay": 0.05}, + ), + ToolUseContent( + type="tool_use", + id="call_3", + name="tool_with_delay", + input={"value": 3, "delay": 0.1}, + ), + ], + model="test-model", + stopReason="toolUse", + ) + else: + return CreateMessageResultWithTools( + role="assistant", + content=[TextContent(type="text", text="Done!")], + model="test-model", + stopReason="endTurn", + ) + + mcp = FastMCP(sampling_handler=sampling_handler) + + @mcp.tool + async def test_tool(context: Context) -> str: + result = await context.sample( + messages="Run tools", + tools=[tool_with_delay], + tool_concurrency=0, # Parallel execution + ) + return result.text or "" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + + assert result.data == "Done!" + # Check that results are in the correct order (1, 2, 3) despite finishing order (2, 3, 1) + tool_result_message = messages_received[1][-1] + tool_results = cast(list[ToolResultContent], tool_result_message.content) + assert len(tool_results) == 3 + assert tool_results[0].toolUseId == "call_1" + assert tool_results[1].toolUseId == "call_2" + assert tool_results[2].toolUseId == "call_3" + # Check values are correct + result_texts = [cast(TextContent, r.content[0]).text for r in tool_results] + assert result_texts == ["1", "2", "3"] + class TestSamplingResultType: """Tests for result_type parameter (structured output).""" diff --git a/tests/deprecated/server/test_include_exclude_tags.py b/tests/deprecated/server/test_include_exclude_tags.py index 1bdf684324..70312b412d 100644 --- a/tests/deprecated/server/test_include_exclude_tags.py +++ b/tests/deprecated/server/test_include_exclude_tags.py @@ -1,68 +1,25 @@ -"""Tests for deprecated include_tags/exclude_tags parameters.""" +"""Tests for removed 
include_tags/exclude_tags parameters.""" import pytest from fastmcp import FastMCP -from fastmcp.server.transforms.visibility import Visibility -class TestIncludeExcludeTagsDeprecation: - """Test that include_tags/exclude_tags emit deprecation warnings but still work.""" +class TestIncludeExcludeTagsRemoved: + """Test that include_tags/exclude_tags raise TypeError with migration hints.""" - def test_exclude_tags_emits_warning(self): - """exclude_tags parameter emits deprecation warning.""" - with pytest.warns(DeprecationWarning, match="exclude_tags.*deprecated"): + def test_exclude_tags_raises_type_error(self): + with pytest.raises(TypeError, match="no longer accepts `exclude_tags`"): FastMCP(exclude_tags={"internal"}) - def test_include_tags_emits_warning(self): - """include_tags parameter emits deprecation warning.""" - with pytest.warns(DeprecationWarning, match="include_tags.*deprecated"): + def test_include_tags_raises_type_error(self): + with pytest.raises(TypeError, match="no longer accepts `include_tags`"): FastMCP(include_tags={"public"}) - def test_exclude_tags_still_works(self): - """exclude_tags adds a Visibility transform that disables matching tags.""" - with pytest.warns(DeprecationWarning): - mcp = FastMCP(exclude_tags={"internal"}) - - # Should have added a Visibility transform that disables the tag - enabled_transforms = [t for t in mcp._transforms if isinstance(t, Visibility)] - assert len(enabled_transforms) == 1 - e = enabled_transforms[0] - assert e._enabled is False - assert e.tags == {"internal"} - - def test_include_tags_still_works(self): - """include_tags adds Visibility transforms for allowlist mode.""" - with pytest.warns(DeprecationWarning): - mcp = FastMCP(include_tags={"public"}) - - # Should have added Visibility transforms for allowlist mode - # (one to disable all, one to enable matching) - enabled_transforms = [t for t in mcp._transforms if isinstance(t, Visibility)] - assert len(enabled_transforms) == 2 - - # First should 
disable all (Visibility.all(False)) - disable_all_transform = enabled_transforms[0] - assert disable_all_transform._enabled is False - assert disable_all_transform.match_all is True - - # Second should enable matching tags - enable_transform = enabled_transforms[1] - assert enable_transform._enabled is True - assert enable_transform.tags == {"public"} - - def test_exclude_and_include_both_create_transforms(self): - """exclude_tags and include_tags both create transforms.""" - with pytest.warns(DeprecationWarning): - mcp = FastMCP(include_tags={"public"}, exclude_tags={"deprecated"}) - - # Should have added transforms for both - # include_tags creates 2 (disable all + enable matching) - # exclude_tags creates 1 (disable matching) - enabled_transforms = [t for t in mcp._transforms if isinstance(t, Visibility)] - assert len(enabled_transforms) == 3 + def test_exclude_tags_error_mentions_disable(self): + with pytest.raises(TypeError, match="server.disable"): + FastMCP(exclude_tags={"internal"}) - # Check we have both tag rules - tags_in_transforms = [t.tags for t in enabled_transforms if t.tags] - assert {"public"} in tags_in_transforms - assert {"deprecated"} in tags_in_transforms + def test_include_tags_error_mentions_enable(self): + with pytest.raises(TypeError, match="server.enable"): + FastMCP(include_tags={"public"}) diff --git a/tests/deprecated/test_add_tool_transformation.py b/tests/deprecated/test_add_tool_transformation.py index 348247b9fc..0228b8b60f 100644 --- a/tests/deprecated/test_add_tool_transformation.py +++ b/tests/deprecated/test_add_tool_transformation.py @@ -68,37 +68,12 @@ async def test_remove_tool_transformation_emits_warning(self): assert "remove_tool_transformation is deprecated" in str(w[0].message) assert "no effect" in str(w[0].message) - async def test_tool_transformations_constructor_emits_warning(self): - """tool_transformations constructor param should emit deprecation warning.""" - with warnings.catch_warnings(record=True) as w: - 
warnings.simplefilter("always") + async def test_tool_transformations_constructor_raises_type_error(self): + """tool_transformations constructor param should raise TypeError.""" + import pytest + + with pytest.raises(TypeError, match="no longer accepts `tool_transformations`"): FastMCP( "test", tool_transformations={"my_tool": ToolTransformConfig(name="renamed")}, ) - - assert len(w) == 1 - assert issubclass(w[0].category, DeprecationWarning) - assert "tool_transformations parameter is deprecated" in str(w[0].message) - - async def test_tool_transformations_constructor_still_works(self): - """tool_transformations constructor param should still apply transforms.""" - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - mcp = FastMCP( - "test", - tool_transformations={ - "my_tool": ToolTransformConfig(name="renamed_tool") - }, - ) - - @mcp.tool - def my_tool() -> str: - return "result" - - async with Client(mcp) as client: - tools = await client.list_tools() - tool_names = [t.name for t in tools] - - assert "my_tool" not in tool_names - assert "renamed_tool" in tool_names diff --git a/tests/deprecated/test_deprecated.py b/tests/deprecated/test_deprecated.py index 25a32aff66..37f33cf27a 100644 --- a/tests/deprecated/test_deprecated.py +++ b/tests/deprecated/test_deprecated.py @@ -1,48 +1,23 @@ -import warnings - import pytest from starlette.applications import Starlette from fastmcp import FastMCP -from fastmcp.utilities.tests import temporary_settings - -# reset deprecation warnings for this module -pytestmark = pytest.mark.filterwarnings("default::DeprecationWarning") - -class TestDeprecationWarningsSetting: - def test_deprecation_warnings_setting_true(self): - with temporary_settings(deprecation_warnings=True): - with pytest.warns(DeprecationWarning) as recorded_warnings: - # will warn once for providing deprecated arg - mcp = FastMCP(host="1.2.3.4") - # will warn once for accessing deprecated property - mcp.settings - assert 
len(recorded_warnings) == 2 +class TestRemovedKwargs: + def test_host_kwarg_raises_type_error(self): + with pytest.raises(TypeError, match="no longer accepts `host`"): + FastMCP(host="1.2.3.4") - def test_deprecation_warnings_setting_false(self): - with temporary_settings(deprecation_warnings=False): - # will error if a warning is raised - with warnings.catch_warnings(): - warnings.simplefilter("error") - # will warn once for providing deprecated arg - mcp = FastMCP(host="1.2.3.4") - # will warn once for accessing deprecated property - mcp.settings + def test_settings_property_removed(self): + mcp = FastMCP() + assert not hasattr(mcp, "_deprecated_settings") + with pytest.raises(AttributeError): + mcp.settings # noqa: B018 # ty: ignore[unresolved-attribute] def test_http_app_with_sse_transport(): - """Test that http_app with SSE transport works (no warning).""" + """Test that http_app with SSE transport works.""" server = FastMCP("TestServer") - - # This should not raise a warning since we're using the new API - with warnings.catch_warnings(record=True) as recorded_warnings: - app = server.http_app(transport="sse") - assert isinstance(app, Starlette) - - # Verify no deprecation warnings were raised for using transport parameter - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 0 + app = server.http_app(transport="sse") + assert isinstance(app, Starlette) diff --git a/tests/deprecated/test_openapi_deprecations.py b/tests/deprecated/test_openapi_deprecations.py index d55f847367..b57611c6f5 100644 --- a/tests/deprecated/test_openapi_deprecations.py +++ b/tests/deprecated/test_openapi_deprecations.py @@ -5,34 +5,9 @@ import pytest -import fastmcp - pytestmark = pytest.mark.filterwarnings("default::DeprecationWarning") -class TestEnableNewOpenAPIParserDeprecation: - """Test enable_new_openapi_parser setting deprecation.""" - - def test_setting_true_emits_warning(self): - 
"""Setting enable_new_openapi_parser=True should emit deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"enable_new_openapi_parser is deprecated.*now the default", - ): - fastmcp.settings.experimental.enable_new_openapi_parser = True - - def test_setting_false_no_warning(self): - """Setting enable_new_openapi_parser=False should not emit warning.""" - with warnings.catch_warnings(record=True) as recorded: - warnings.simplefilter("always") - fastmcp.settings.experimental.enable_new_openapi_parser = False - - deprecation_warnings = [ - w for w in recorded if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 0 - - class TestExperimentalOpenAPIImportDeprecation: """Test experimental OpenAPI import path deprecations.""" diff --git a/tests/deprecated/test_settings.py b/tests/deprecated/test_settings.py index 301abf4102..47ea6613c5 100644 --- a/tests/deprecated/test_settings.py +++ b/tests/deprecated/test_settings.py @@ -1,319 +1,64 @@ -import warnings -from unittest.mock import patch - import pytest from fastmcp import FastMCP -# reset deprecation warnings for this module -pytestmark = pytest.mark.filterwarnings("default::DeprecationWarning") - - -class TestDeprecatedServerInitKwargs: - """Test deprecated server initialization keyword arguments.""" - - def test_log_level_deprecation_warning(self): - """Test that log_level raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `log_level` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", log_level="DEBUG") - - # Verify the setting is still applied - assert server._deprecated_settings.log_level == "DEBUG" - - def test_debug_deprecation_warning(self): - """Test that debug raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `debug` when creating a server is deprecated\. 
Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", debug=True) - - # Verify the setting is still applied - assert server._deprecated_settings.debug is True - - def test_host_deprecation_warning(self): - """Test that host raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `host` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", host="0.0.0.0") - - # Verify the setting is still applied - assert server._deprecated_settings.host == "0.0.0.0" - - def test_port_deprecation_warning(self): - """Test that port raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `port` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", port=8080) - - # Verify the setting is still applied - assert server._deprecated_settings.port == 8080 - - def test_sse_path_deprecation_warning(self): - """Test that sse_path raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `sse_path` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", sse_path="/custom-sse") - - # Verify the setting is still applied - assert server._deprecated_settings.sse_path == "/custom-sse" - - def test_message_path_deprecation_warning(self): - """Test that message_path raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `message_path` when creating a server is deprecated\. 
Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", message_path="/custom-message") - - # Verify the setting is still applied - assert server._deprecated_settings.message_path == "/custom-message" - - def test_streamable_http_path_deprecation_warning(self): - """Test that streamable_http_path raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `streamable_http_path` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", streamable_http_path="/custom-http") - - # Verify the setting is still applied - assert server._deprecated_settings.streamable_http_path == "/custom-http" - - def test_json_response_deprecation_warning(self): - """Test that json_response raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `json_response` when creating a server is deprecated\. Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", json_response=True) - - # Verify the setting is still applied - assert server._deprecated_settings.json_response is True - - def test_stateless_http_deprecation_warning(self): - """Test that stateless_http raises a deprecation warning.""" - with pytest.warns( - DeprecationWarning, - match=r"Providing `stateless_http` when creating a server is deprecated\. 
Provide it when calling `run` or as a global setting instead\.", - ): - server = FastMCP("TestServer", stateless_http=True) - - # Verify the setting is still applied - assert server._deprecated_settings.stateless_http is True - - def test_multiple_deprecated_kwargs_warnings(self): - """Test that multiple deprecated kwargs each raise their own warning.""" - with warnings.catch_warnings(record=True) as recorded_warnings: - warnings.simplefilter("always") - server = FastMCP( - "TestServer", - log_level="INFO", - debug=False, - host="127.0.0.1", - port=9999, - sse_path="/sse/", - message_path="/msg", - streamable_http_path="/http", - json_response=False, - stateless_http=False, - ) - # Should have 9 deprecation warnings (one for each deprecated parameter) - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 9 - - # Verify all expected parameters are mentioned in warnings - expected_params = { - "log_level", - "debug", - "host", - "port", - "sse_path", - "message_path", - "streamable_http_path", - "json_response", - "stateless_http", - } - mentioned_params = set() - for warning in deprecation_warnings: - message = str(warning.message) - for param in expected_params: - if f"Providing `{param}`" in message: - mentioned_params.add(param) - - assert mentioned_params == expected_params - - # Verify all settings are still applied - assert server._deprecated_settings.log_level == "INFO" - assert server._deprecated_settings.debug is False - assert server._deprecated_settings.host == "127.0.0.1" - assert server._deprecated_settings.port == 9999 - assert server._deprecated_settings.sse_path == "/sse/" - assert server._deprecated_settings.message_path == "/msg" - assert server._deprecated_settings.streamable_http_path == "/http" - assert server._deprecated_settings.json_response is False - assert server._deprecated_settings.stateless_http is False - - def 
test_non_deprecated_kwargs_no_warnings(self): - """Test that non-deprecated kwargs don't raise warnings.""" - with warnings.catch_warnings(record=True) as recorded_warnings: - warnings.simplefilter("always") - server = FastMCP( - name="TestServer", - instructions="Test instructions", - on_duplicate="warn", # New unified parameter - mask_error_details=True, - ) - - # Should have no deprecation warnings - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 0 - - # Verify server was created successfully +class TestRemovedServerInitKwargs: + """Test that removed server initialization keyword arguments raise TypeError.""" + + @pytest.mark.parametrize( + "kwarg, value, expected_message", + [ + ("host", "0.0.0.0", "run_http_async"), + ("port", 8080, "run_http_async"), + ("sse_path", "/custom-sse", "FASTMCP_SSE_PATH"), + ("message_path", "/custom-message", "FASTMCP_MESSAGE_PATH"), + ("streamable_http_path", "/custom-http", "run_http_async"), + ("json_response", True, "run_http_async"), + ("stateless_http", True, "run_http_async"), + ("debug", True, "FASTMCP_DEBUG"), + ("log_level", "DEBUG", "run_http_async"), + ("on_duplicate_tools", "warn", "on_duplicate="), + ("on_duplicate_resources", "error", "on_duplicate="), + ("on_duplicate_prompts", "replace", "on_duplicate="), + ("tool_serializer", lambda x: str(x), "ToolResult"), + ("include_tags", {"public"}, "server.enable"), + ("exclude_tags", {"internal"}, "server.disable"), + ( + "tool_transformations", + {"my_tool": {"name": "renamed"}}, + "server.add_transform", + ), + ], + ) + def test_removed_kwarg_raises_type_error(self, kwarg, value, expected_message): + with pytest.raises(TypeError, match=f"no longer accepts `{kwarg}`"): + FastMCP("TestServer", **{kwarg: value}) + + @pytest.mark.parametrize( + "kwarg, value, expected_message", + [ + ("host", "0.0.0.0", "run_http_async"), + ("on_duplicate_tools", "warn", "on_duplicate="), + 
("include_tags", {"public"}, "server.enable"), + ], + ) + def test_removed_kwarg_error_includes_migration_hint( + self, kwarg, value, expected_message + ): + with pytest.raises(TypeError, match=expected_message): + FastMCP("TestServer", **{kwarg: value}) + + def test_unknown_kwarg_raises_standard_type_error(self): + with pytest.raises(TypeError, match="unexpected keyword argument"): + FastMCP("TestServer", **{"totally_fake_param": True}) # ty: ignore[invalid-argument-type] + + def test_valid_kwargs_still_work(self): + server = FastMCP( + name="TestServer", + instructions="Test instructions", + on_duplicate="warn", + mask_error_details=True, + ) assert server.name == "TestServer" assert server.instructions == "Test instructions" - - def test_deprecated_duplicate_kwargs_raise_warnings(self): - """Test that deprecated on_duplicate_* kwargs raise warnings.""" - with warnings.catch_warnings(record=True) as recorded_warnings: - warnings.simplefilter("always") - FastMCP( - name="TestServer", - on_duplicate_tools="warn", - on_duplicate_resources="error", - on_duplicate_prompts="replace", - ) - - # Should have 3 deprecation warnings (one for each deprecated param) - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 3 - - # Check warning messages - warning_messages = [str(w.message) for w in deprecation_warnings] - assert any("on_duplicate_tools" in msg for msg in warning_messages) - assert any("on_duplicate_resources" in msg for msg in warning_messages) - assert any("on_duplicate_prompts" in msg for msg in warning_messages) - - def test_none_values_no_warnings(self): - """Test that None values for deprecated kwargs don't raise warnings.""" - with warnings.catch_warnings(record=True) as recorded_warnings: - warnings.simplefilter("always") - FastMCP( - "TestServer", - log_level=None, - debug=None, - host=None, - port=None, - sse_path=None, - message_path=None, - 
streamable_http_path=None, - json_response=None, - stateless_http=None, - ) - - # Should have no deprecation warnings for None values - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 0 - - def test_deprecated_settings_inheritance_from_global(self): - """Test that deprecated settings inherit from global settings when not provided.""" - # Mock fastmcp.settings to test inheritance - with patch("fastmcp.settings") as mock_settings: - mock_settings.model_dump.return_value = { - "log_level": "WARNING", - "debug": True, - "host": "0.0.0.0", - "port": 3000, - "sse_path": "/events", - "message_path": "/messages", - "streamable_http_path": "/stream", - "json_response": True, - "stateless_http": True, - } - - server = FastMCP("TestServer") - - # Verify settings are inherited from global settings - assert server._deprecated_settings.log_level == "WARNING" - assert server._deprecated_settings.debug is True - assert server._deprecated_settings.host == "0.0.0.0" - assert server._deprecated_settings.port == 3000 - assert server._deprecated_settings.sse_path == "/events" - assert server._deprecated_settings.message_path == "/messages" - assert server._deprecated_settings.streamable_http_path == "/stream" - assert server._deprecated_settings.json_response is True - assert server._deprecated_settings.stateless_http is True - - def test_deprecated_settings_override_global(self): - """Test that deprecated settings override global settings when provided.""" - # Mock fastmcp.settings to test override behavior - with patch("fastmcp.settings") as mock_settings: - mock_settings.model_dump.return_value = { - "log_level": "WARNING", - "debug": True, - "host": "0.0.0.0", - "port": 3000, - "sse_path": "/events", - "message_path": "/messages", - "streamable_http_path": "/stream", - "json_response": True, - "stateless_http": True, - } - - with warnings.catch_warnings(): - 
warnings.simplefilter("ignore") # Ignore warnings for this test - server = FastMCP( - "TestServer", - log_level="ERROR", - debug=False, - host="127.0.0.1", - port=8080, - ) - - # Verify provided settings override global settings - assert server._deprecated_settings.log_level == "ERROR" - assert server._deprecated_settings.debug is False - assert server._deprecated_settings.host == "127.0.0.1" - assert server._deprecated_settings.port == 8080 - # Non-overridden settings should still come from global - assert server._deprecated_settings.sse_path == "/events" - assert server._deprecated_settings.message_path == "/messages" - assert server._deprecated_settings.streamable_http_path == "/stream" - assert server._deprecated_settings.json_response is True - assert server._deprecated_settings.stateless_http is True - - def test_stacklevel_points_to_constructor_call(self): - """Test that deprecation warnings point to the FastMCP constructor call.""" - with warnings.catch_warnings(record=True) as recorded_warnings: - warnings.simplefilter("always") - - FastMCP("TestServer", log_level="DEBUG") - - # Should have exactly one deprecation warning - deprecation_warnings = [ - w for w in recorded_warnings if issubclass(w.category, DeprecationWarning) - ] - assert len(deprecation_warnings) == 1 - - # The warning should point to the server.py file where FastMCP.__init__ is called - # This verifies the stacklevel is working as intended (pointing to constructor) - warning = deprecation_warnings[0] - assert "server.py" in warning.filename diff --git a/tests/deprecated/test_tool_serializer.py b/tests/deprecated/test_tool_serializer.py index f90bf06cca..2b706ae746 100644 --- a/tests/deprecated/test_tool_serializer.py +++ b/tests/deprecated/test_tool_serializer.py @@ -143,15 +143,14 @@ def my_tool(x: int) -> int: with pytest.warns(DeprecationWarning, match="serializer.*deprecated"): provider.tool(my_tool, serializer=custom_serializer) - def 
test_fastmcp_tool_serializer_parameter_warning(self): - """Test that FastMCP tool_serializer parameter warns.""" + def test_fastmcp_tool_serializer_parameter_raises_type_error(self): + """Test that FastMCP tool_serializer parameter raises TypeError.""" def custom_serializer(data) -> str: return f"Custom: {data}" - with temporary_settings(deprecation_warnings=True): - with pytest.warns(DeprecationWarning, match="tool_serializer.*deprecated"): - FastMCP("TestServer", tool_serializer=custom_serializer) + with pytest.raises(TypeError, match="no longer accepts `tool_serializer`"): + FastMCP("TestServer", tool_serializer=custom_serializer) def test_transformed_tool_from_tool_serializer_warning(self): """Test that TransformedTool.from_tool warns when serializer is provided.""" diff --git a/tests/server/auth/providers/test_azure.py b/tests/server/auth/providers/test_azure.py index 6bf25a50df..0ea6166bf2 100644 --- a/tests/server/auth/providers/test_azure.py +++ b/tests/server/auth/providers/test_azure.py @@ -2,6 +2,8 @@ from urllib.parse import parse_qs, urlparse +import pytest +from key_value.aio.stores.memory import MemoryStore from mcp.server.auth.provider import AuthorizationParams from mcp.shared.auth import OAuthClientInformationFull from pydantic import AnyUrl @@ -14,10 +16,16 @@ from fastmcp.server.auth.providers.jwt import JWTVerifier, RSAKeyPair +@pytest.fixture +def memory_storage() -> MemoryStore: + """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.""" + return MemoryStore() + + class TestAzureProvider: """Test Azure OAuth provider functionality.""" - def test_init_with_explicit_params(self): + def test_init_with_explicit_params(self, memory_storage: MemoryStore): """Test AzureProvider initialization with explicit parameters.""" provider = AzureProvider( client_id="12345678-1234-1234-1234-123456789012", @@ -26,6 +34,7 @@ def test_init_with_explicit_params(self): base_url="https://myserver.com", required_scopes=["read", "write"], 
jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider._upstream_client_id == "12345678-1234-1234-1234-123456789012" @@ -37,7 +46,7 @@ def test_init_with_explicit_params(self): parsed_token = urlparse(provider._upstream_token_endpoint) assert "87654321-4321-4321-4321-210987654321" in parsed_token.path - def test_init_defaults(self): + def test_init_defaults(self, memory_storage: MemoryStore): """Test that default values are applied correctly.""" provider = AzureProvider( client_id="test_client", @@ -46,13 +55,14 @@ def test_init_defaults(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check defaults assert provider._redirect_path == "/auth/callback" # Azure provider defaults are set but we can't easily verify them without accessing internals - def test_offline_access_automatically_included(self): + def test_offline_access_automatically_included(self, memory_storage: MemoryStore): """Test that offline_access is automatically added to get refresh tokens.""" # Without specifying offline_access provider = AzureProvider( @@ -62,11 +72,12 @@ def test_offline_access_automatically_included(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert "offline_access" in provider.additional_authorize_scopes - def test_offline_access_not_duplicated(self): + def test_offline_access_not_duplicated(self, memory_storage: MemoryStore): """Test that offline_access is not duplicated if already specified.""" provider = AzureProvider( client_id="test_client", @@ -76,13 +87,14 @@ def test_offline_access_not_duplicated(self): required_scopes=["read"], additional_authorize_scopes=["User.Read", "offline_access"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Should appear exactly once assert provider.additional_authorize_scopes.count("offline_access") == 1 assert "User.Read" in 
provider.additional_authorize_scopes - def test_oauth_endpoints_configured_correctly(self): + def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore): """Test that OAuth endpoints are configured correctly.""" provider = AzureProvider( client_id="test_client", @@ -91,6 +103,7 @@ def test_oauth_endpoints_configured_correctly(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test_secret", + client_storage=memory_storage, ) # Check that endpoints use the correct Azure OAuth2 v2.0 endpoints with tenant @@ -106,7 +119,7 @@ def test_oauth_endpoints_configured_correctly(self): provider._upstream_revocation_endpoint is None ) # Azure doesn't support revocation - def test_special_tenant_values(self): + def test_special_tenant_values(self, memory_storage: MemoryStore): """Test that special tenant values are accepted.""" # Test with "organizations" provider1 = AzureProvider( @@ -116,6 +129,7 @@ def test_special_tenant_values(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) parsed = urlparse(provider1._upstream_authorization_endpoint) assert "/organizations/" in parsed.path @@ -128,11 +142,12 @@ def test_special_tenant_values(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) parsed = urlparse(provider2._upstream_authorization_endpoint) assert "/consumers/" in parsed.path - def test_azure_specific_scopes(self): + def test_azure_specific_scopes(self, memory_storage: MemoryStore): """Test handling of custom API scope formats.""" # Test that the provider accepts custom API scopes without error provider = AzureProvider( @@ -146,6 +161,7 @@ def test_azure_specific_scopes(self): "admin", ], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Provider should initialize successfully with these scopes @@ -158,7 +174,9 @@ def 
test_azure_specific_scopes(self): "admin", ] - def test_init_does_not_require_api_client_id_anymore(self): + def test_init_does_not_require_api_client_id_anymore( + self, memory_storage: MemoryStore + ): """API client ID is no longer required; audience is client_id.""" provider = AzureProvider( client_id="test_client", @@ -167,10 +185,13 @@ def test_init_does_not_require_api_client_id_anymore(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider is not None - def test_init_with_custom_audience_uses_jwt_verifier(self): + def test_init_with_custom_audience_uses_jwt_verifier( + self, memory_storage: MemoryStore + ): """When audience is provided, JWTVerifier is configured with JWKS and issuer.""" from fastmcp.server.auth.providers.jwt import JWTVerifier @@ -182,6 +203,7 @@ def test_init_with_custom_audience_uses_jwt_verifier(self): identifier_uri="api://my-api", required_scopes=[".default"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider._token_validator is not None @@ -197,7 +219,9 @@ def test_init_with_custom_audience_uses_jwt_verifier(self): # (Azure returns unprefixed scopes like ".default" in JWT tokens) assert verifier.required_scopes == [".default"] - async def test_authorize_filters_resource_and_stores_unprefixed_scopes(self): + async def test_authorize_filters_resource_and_stores_unprefixed_scopes( + self, memory_storage: MemoryStore + ): """authorize() should drop resource parameter and store unprefixed scopes for MCP clients.""" provider = AzureProvider( client_id="test_client", @@ -207,6 +231,7 @@ async def test_authorize_filters_resource_and_stores_unprefixed_scopes(self): required_scopes=["read", "write"], base_url="https://srv.example", jwt_signing_key="test-secret", + client_storage=memory_storage, ) await provider.register_client( @@ -264,7 +289,9 @@ async def 
test_authorize_filters_resource_and_stores_unprefixed_scopes(self): or "api://my-api/write" in upstream_url ) - async def test_authorize_appends_additional_scopes(self): + async def test_authorize_appends_additional_scopes( + self, memory_storage: MemoryStore + ): """authorize() should append additional_authorize_scopes to the authorization request.""" provider = AzureProvider( client_id="test_client", @@ -275,6 +302,7 @@ async def test_authorize_appends_additional_scopes(self): base_url="https://srv.example", additional_authorize_scopes=["Mail.Read", "User.Read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) await provider.register_client( @@ -326,7 +354,7 @@ async def test_authorize_appends_additional_scopes(self): assert "Mail.Read" in upstream_url assert "User.Read" in upstream_url - def test_base_authority_defaults_to_public_cloud(self): + def test_base_authority_defaults_to_public_cloud(self, memory_storage: MemoryStore): """Test that base_authority defaults to login.microsoftonline.com.""" provider = AzureProvider( client_id="test_client", @@ -335,6 +363,7 @@ def test_base_authority_defaults_to_public_cloud(self): base_url="https://myserver.com", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert ( @@ -355,7 +384,7 @@ def test_base_authority_defaults_to_public_cloud(self): == "https://login.microsoftonline.com/test-tenant/discovery/v2.0/keys" ) - def test_base_authority_azure_government(self): + def test_base_authority_azure_government(self, memory_storage: MemoryStore): """Test Azure Government endpoints with login.microsoftonline.us.""" provider = AzureProvider( client_id="test_client", @@ -365,6 +394,7 @@ def test_base_authority_azure_government(self): required_scopes=["read"], base_authority="login.microsoftonline.us", jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert ( @@ -385,7 +415,7 @@ def test_base_authority_azure_government(self): == 
"https://login.microsoftonline.us/gov-tenant-id/discovery/v2.0/keys" ) - def test_base_authority_from_parameter(self): + def test_base_authority_from_parameter(self, memory_storage: MemoryStore): """Test that base_authority can be set via parameter.""" provider = AzureProvider( client_id="env-client-id", @@ -395,6 +425,7 @@ def test_base_authority_from_parameter(self): required_scopes=["read"], base_authority="login.microsoftonline.us", jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert ( @@ -415,7 +446,9 @@ def test_base_authority_from_parameter(self): == "https://login.microsoftonline.us/env-tenant-id/discovery/v2.0/keys" ) - def test_base_authority_with_special_tenant_values(self): + def test_base_authority_with_special_tenant_values( + self, memory_storage: MemoryStore + ): """Test that base_authority works with special tenant values like 'organizations'.""" provider = AzureProvider( client_id="test_client", @@ -425,13 +458,16 @@ def test_base_authority_with_special_tenant_values(self): required_scopes=["read"], base_authority="login.microsoftonline.us", jwt_signing_key="test-secret", + client_storage=memory_storage, ) parsed = urlparse(provider._upstream_authorization_endpoint) assert parsed.netloc == "login.microsoftonline.us" assert "/organizations/" in parsed.path - def test_prepare_scopes_for_upstream_refresh_basic_prefixing(self): + def test_prepare_scopes_for_upstream_refresh_basic_prefixing( + self, memory_storage: MemoryStore + ): """Test that unprefixed scopes are correctly prefixed for Azure token refresh.""" provider = AzureProvider( client_id="test_client", @@ -441,6 +477,7 @@ def test_prepare_scopes_for_upstream_refresh_basic_prefixing(self): identifier_uri="api://my-api", required_scopes=["read", "write"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Unprefixed scopes from storage should be prefixed @@ -451,7 +488,9 @@ def test_prepare_scopes_for_upstream_refresh_basic_prefixing(self): assert 
"offline_access" in result # Auto-included for refresh tokens assert len(result) == 3 - def test_prepare_scopes_for_upstream_refresh_already_prefixed(self): + def test_prepare_scopes_for_upstream_refresh_already_prefixed( + self, memory_storage: MemoryStore + ): """Test that already-prefixed scopes remain unchanged.""" provider = AzureProvider( client_id="test_client", @@ -461,6 +500,7 @@ def test_prepare_scopes_for_upstream_refresh_already_prefixed(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Already prefixed scopes should pass through unchanged @@ -473,7 +513,9 @@ def test_prepare_scopes_for_upstream_refresh_already_prefixed(self): assert "offline_access" in result # Auto-included for refresh tokens assert len(result) == 3 - def test_prepare_scopes_for_upstream_refresh_with_additional_scopes(self): + def test_prepare_scopes_for_upstream_refresh_with_additional_scopes( + self, memory_storage: MemoryStore + ): """Test that only OIDC scopes from additional_authorize_scopes are added. 
Azure only allows ONE resource per token request (AADSTS28000), so @@ -493,6 +535,7 @@ def test_prepare_scopes_for_upstream_refresh_with_additional_scopes(self): "offline_access", ], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Base scopes should be prefixed, only OIDC scopes appended @@ -508,6 +551,7 @@ def test_prepare_scopes_for_upstream_refresh_with_additional_scopes(self): def test_prepare_scopes_for_upstream_refresh_filters_duplicate_additional_scopes( self, + memory_storage: MemoryStore, ): """Test that accidentally stored additional_authorize_scopes are filtered out.""" provider = AzureProvider( @@ -519,6 +563,7 @@ def test_prepare_scopes_for_upstream_refresh_filters_duplicate_additional_scopes required_scopes=["read"], additional_authorize_scopes=["User.Read", "openid"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # If additional scopes were accidentally stored, they should be filtered @@ -535,7 +580,9 @@ def test_prepare_scopes_for_upstream_refresh_filters_duplicate_additional_scopes assert "offline_access" in result # Auto-included and is OIDC assert len(result) == 3 - def test_prepare_scopes_for_upstream_refresh_mixed_scopes(self): + def test_prepare_scopes_for_upstream_refresh_mixed_scopes( + self, memory_storage: MemoryStore + ): """Test mixed scenario with both prefixed and unprefixed scopes.""" provider = AzureProvider( client_id="test_client", @@ -546,6 +593,7 @@ def test_prepare_scopes_for_upstream_refresh_mixed_scopes(self): required_scopes=["read"], additional_authorize_scopes=["openid"], # OIDC scope jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Mix of prefixed and unprefixed scopes @@ -560,7 +608,9 @@ def test_prepare_scopes_for_upstream_refresh_mixed_scopes(self): assert "offline_access" in result # Auto-included assert len(result) == 5 - def test_prepare_scopes_for_upstream_refresh_scope_with_slash(self): + def test_prepare_scopes_for_upstream_refresh_scope_with_slash( + 
self, memory_storage: MemoryStore + ): """Test that scopes containing '/' are not prefixed.""" provider = AzureProvider( client_id="test_client", @@ -570,6 +620,7 @@ def test_prepare_scopes_for_upstream_refresh_scope_with_slash(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Scopes with "/" should not be prefixed (already fully qualified) @@ -582,7 +633,9 @@ def test_prepare_scopes_for_upstream_refresh_scope_with_slash(self): "https://graph.microsoft.com/.default" in result ) # Not prefixed (contains ://) - def test_prepare_scopes_for_upstream_refresh_empty_scopes(self): + def test_prepare_scopes_for_upstream_refresh_empty_scopes( + self, memory_storage: MemoryStore + ): """Test behavior with empty scopes list.""" provider = AzureProvider( client_id="test_client", @@ -593,6 +646,7 @@ def test_prepare_scopes_for_upstream_refresh_empty_scopes(self): required_scopes=["read"], additional_authorize_scopes=["User.Read", "openid"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Empty scopes should still add OIDC scopes (not User.Read) @@ -603,7 +657,9 @@ def test_prepare_scopes_for_upstream_refresh_empty_scopes(self): assert "offline_access" in result # Auto-included assert len(result) == 2 # Only OIDC scopes: openid + offline_access - def test_prepare_scopes_for_upstream_refresh_no_additional_scopes(self): + def test_prepare_scopes_for_upstream_refresh_no_additional_scopes( + self, memory_storage: MemoryStore + ): """Test behavior when no additional_authorize_scopes are configured.""" provider = AzureProvider( client_id="test_client", @@ -613,6 +669,7 @@ def test_prepare_scopes_for_upstream_refresh_no_additional_scopes(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Should prefix base scopes, plus auto-added offline_access @@ -623,7 +680,9 @@ def 
test_prepare_scopes_for_upstream_refresh_no_additional_scopes(self): assert "offline_access" in result # Auto-included assert len(result) == 3 - def test_prepare_scopes_for_upstream_refresh_deduplicates_scopes(self): + def test_prepare_scopes_for_upstream_refresh_deduplicates_scopes( + self, memory_storage: MemoryStore + ): """Test that duplicate scopes are deduplicated while preserving order.""" provider = AzureProvider( client_id="test_client", @@ -634,6 +693,7 @@ def test_prepare_scopes_for_upstream_refresh_deduplicates_scopes(self): required_scopes=["read"], additional_authorize_scopes=["openid", "profile"], # OIDC scopes only jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Test with duplicate base scopes @@ -651,7 +711,9 @@ def test_prepare_scopes_for_upstream_refresh_deduplicates_scopes(self): ] assert len(result) == 5 - def test_prepare_scopes_for_upstream_refresh_deduplicates_prefixed_variants(self): + def test_prepare_scopes_for_upstream_refresh_deduplicates_prefixed_variants( + self, memory_storage: MemoryStore + ): """Test that both prefixed and unprefixed variants are deduplicated.""" provider = AzureProvider( client_id="test_client", @@ -661,6 +723,7 @@ def test_prepare_scopes_for_upstream_refresh_deduplicates_prefixed_variants(self identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Test with both prefixed and unprefixed variants of same scope @@ -688,11 +751,13 @@ class TestOIDCScopeHandling: 3. 
OIDC scopes are still advertised to clients via valid_scopes """ - def test_oidc_scopes_constant(self): + def test_oidc_scopes_constant(self, memory_storage: MemoryStore): """Verify OIDC_SCOPES contains the standard OIDC scopes.""" assert OIDC_SCOPES == {"openid", "profile", "email", "offline_access"} - def test_prefix_scopes_does_not_prefix_oidc_scopes(self): + def test_prefix_scopes_does_not_prefix_oidc_scopes( + self, memory_storage: MemoryStore + ): """Test that _prefix_scopes_for_azure never prefixes OIDC scopes.""" provider = AzureProvider( client_id="test_client", @@ -702,6 +767,7 @@ def test_prefix_scopes_does_not_prefix_oidc_scopes(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # All OIDC scopes should pass through unchanged @@ -711,7 +777,7 @@ def test_prefix_scopes_does_not_prefix_oidc_scopes(self): assert result == ["openid", "profile", "email", "offline_access"] - def test_prefix_scopes_mixed_oidc_and_custom(self): + def test_prefix_scopes_mixed_oidc_and_custom(self, memory_storage: MemoryStore): """Test prefixing with a mix of OIDC and custom scopes.""" provider = AzureProvider( client_id="test_client", @@ -721,6 +787,7 @@ def test_prefix_scopes_mixed_oidc_and_custom(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) result = provider._prefix_scopes_for_azure( @@ -736,7 +803,9 @@ def test_prefix_scopes_mixed_oidc_and_custom(self): assert "api://my-api/openid" not in result assert "api://my-api/profile" not in result - def test_prefix_scopes_dot_notation_gets_prefixed(self): + def test_prefix_scopes_dot_notation_gets_prefixed( + self, memory_storage: MemoryStore + ): """Test that dot-notation scopes get prefixed (use additional_authorize_scopes for Graph).""" provider = AzureProvider( client_id="test_client", @@ -746,6 +815,7 @@ def test_prefix_scopes_dot_notation_gets_prefixed(self): 
identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Dot-notation scopes ARE prefixed - use additional_authorize_scopes for Graph @@ -754,7 +824,9 @@ def test_prefix_scopes_dot_notation_gets_prefixed(self): assert result == ["api://my-api/my.scope", "api://my-api/admin.read"] - def test_prefix_scopes_fully_qualified_graph_not_prefixed(self): + def test_prefix_scopes_fully_qualified_graph_not_prefixed( + self, memory_storage: MemoryStore + ): """Test that fully-qualified Graph scopes are not prefixed.""" provider = AzureProvider( client_id="test_client", @@ -764,6 +836,7 @@ def test_prefix_scopes_fully_qualified_graph_not_prefixed(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) result = provider._prefix_scopes_for_azure( @@ -779,7 +852,9 @@ def test_prefix_scopes_fully_qualified_graph_not_prefixed(self): "https://graph.microsoft.com/Mail.Send", ] - def test_required_scopes_with_oidc_filters_validation(self): + def test_required_scopes_with_oidc_filters_validation( + self, memory_storage: MemoryStore + ): """Test that OIDC scopes in required_scopes are filtered from token validation.""" provider = AzureProvider( client_id="test_client", @@ -789,12 +864,15 @@ def test_required_scopes_with_oidc_filters_validation(self): identifier_uri="api://my-api", required_scopes=["read", "openid", "profile"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Token validator should only require non-OIDC scopes assert provider._token_validator.required_scopes == ["read"] - def test_required_scopes_all_oidc_results_in_no_validation(self): + def test_required_scopes_all_oidc_results_in_no_validation( + self, memory_storage: MemoryStore + ): """Test that if all required_scopes are OIDC, no scope validation occurs.""" provider = AzureProvider( client_id="test_client", @@ -804,12 +882,13 @@ def 
test_required_scopes_all_oidc_results_in_no_validation(self): identifier_uri="api://my-api", required_scopes=["openid", "profile"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Token validator should have empty required scopes (all were OIDC) assert provider._token_validator.required_scopes == [] - def test_valid_scopes_includes_oidc_scopes(self): + def test_valid_scopes_includes_oidc_scopes(self, memory_storage: MemoryStore): """Test that valid_scopes advertises OIDC scopes to clients.""" provider = AzureProvider( client_id="test_client", @@ -819,6 +898,7 @@ def test_valid_scopes_includes_oidc_scopes(self): identifier_uri="api://my-api", required_scopes=["read", "openid", "profile"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # required_scopes (used for validation) excludes OIDC scopes @@ -831,7 +911,9 @@ def test_valid_scopes_includes_oidc_scopes(self): "profile", ] - def test_prepare_scopes_for_refresh_handles_oidc_scopes(self): + def test_prepare_scopes_for_refresh_handles_oidc_scopes( + self, memory_storage: MemoryStore + ): """Test that token refresh correctly handles OIDC scopes.""" provider = AzureProvider( client_id="test_client", @@ -841,6 +923,7 @@ def test_prepare_scopes_for_refresh_handles_oidc_scopes(self): identifier_uri="api://my-api", required_scopes=["read"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Simulate stored scopes that include OIDC scopes @@ -864,7 +947,7 @@ class TestAzureTokenExchangeScopes: properly prefixed scopes. 
""" - def test_prepare_scopes_returns_prefixed_scopes(self): + def test_prepare_scopes_returns_prefixed_scopes(self, memory_storage: MemoryStore): """Test that _prepare_scopes_for_token_exchange returns prefixed scopes.""" provider = AzureProvider( client_id="test_client", @@ -874,6 +957,7 @@ def test_prepare_scopes_returns_prefixed_scopes(self): identifier_uri="api://my-api", required_scopes=["read", "write"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) scopes = provider._prepare_scopes_for_token_exchange(["read", "write"]) @@ -881,7 +965,9 @@ def test_prepare_scopes_returns_prefixed_scopes(self): assert "api://my-api/read" in scopes assert "api://my-api/write" in scopes - def test_prepare_scopes_includes_additional_oidc_scopes(self): + def test_prepare_scopes_includes_additional_oidc_scopes( + self, memory_storage: MemoryStore + ): """Test that _prepare_scopes_for_token_exchange includes OIDC scopes.""" provider = AzureProvider( client_id="test_client", @@ -892,6 +978,7 @@ def test_prepare_scopes_includes_additional_oidc_scopes(self): required_scopes=["read"], additional_authorize_scopes=["openid", "profile", "offline_access"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) scopes = provider._prepare_scopes_for_token_exchange(["read"]) @@ -901,7 +988,9 @@ def test_prepare_scopes_includes_additional_oidc_scopes(self): assert "profile" in scopes assert "offline_access" in scopes - def test_prepare_scopes_excludes_other_api_scopes(self): + def test_prepare_scopes_excludes_other_api_scopes( + self, memory_storage: MemoryStore + ): """Test token exchange excludes other API scopes (Azure AADSTS28000). Azure only allows ONE resource per token exchange. 
Other API scopes @@ -921,6 +1010,7 @@ def test_prepare_scopes_excludes_other_api_scopes(self): "api://11111111-2222-3333-4444-555555555555/user_impersonation", ], jwt_signing_key="test-secret", + client_storage=memory_storage, ) scopes = provider._prepare_scopes_for_token_exchange(["user_impersonation"]) @@ -935,7 +1025,7 @@ def test_prepare_scopes_excludes_other_api_scopes(self): assert not any("api://aaaaaaaa" in s for s in scopes) assert not any("api://11111111" in s for s in scopes) - def test_prepare_scopes_deduplicates_scopes(self): + def test_prepare_scopes_deduplicates_scopes(self, memory_storage: MemoryStore): """Test that duplicate scopes are deduplicated.""" provider = AzureProvider( client_id="test_client", @@ -946,6 +1036,7 @@ def test_prepare_scopes_deduplicates_scopes(self): required_scopes=["read"], additional_authorize_scopes=["api://my-api/read", "openid"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Pass a scope that will be prefixed to match one in additional_authorize_scopes @@ -955,7 +1046,9 @@ def test_prepare_scopes_deduplicates_scopes(self): assert scopes.count("api://my-api/read") == 1 assert "openid" in scopes - def test_extra_token_params_does_not_contain_scope(self): + def test_extra_token_params_does_not_contain_scope( + self, memory_storage: MemoryStore + ): """Test that extra_token_params doesn't contain scope to avoid TypeError. Previously, Azure provider set extra_token_params={"scope": ...} during init. 
@@ -974,6 +1067,7 @@ def test_extra_token_params_does_not_contain_scope(self): required_scopes=["read", "write"], additional_authorize_scopes=["openid", "profile", "offline_access"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # extra_token_params should NOT contain "scope" to avoid TypeError during refresh @@ -1122,3 +1216,99 @@ def test_specific_tenant_sets_issuer(self): verifier.issuer == "https://login.microsoftonline.com/12345678-1234-1234-1234-123456789012/v2.0" ) + + +class TestAzureOBOIntegration: + """Tests for azure.identity OBO integration (create_obo_credential, EntraOBOToken).""" + + def test_create_obo_credential_returns_configured_credential(self): + """Test that create_obo_credential returns a properly configured credential.""" + from unittest.mock import MagicMock, patch + + provider = AzureProvider( + client_id="test-client-id", + client_secret="test-client-secret", + tenant_id="test-tenant-id", + base_url="https://myserver.com", + required_scopes=["read"], + jwt_signing_key="test-secret", + ) + + mock_credential = MagicMock() + with patch( + "azure.identity.aio.OnBehalfOfCredential", return_value=mock_credential + ) as mock_class: + credential = provider.create_obo_credential(user_assertion="user-token-123") + + mock_class.assert_called_once_with( + tenant_id="test-tenant-id", + client_id="test-client-id", + client_secret="test-client-secret", + user_assertion="user-token-123", + authority="https://login.microsoftonline.com", + ) + assert credential is mock_credential + + def test_create_obo_credential_with_custom_authority(self): + """Test that create_obo_credential uses custom base_authority.""" + from unittest.mock import MagicMock, patch + + provider = AzureProvider( + client_id="test-client-id", + client_secret="test-client-secret", + tenant_id="gov-tenant-id", + base_url="https://myserver.com", + required_scopes=["read"], + base_authority="login.microsoftonline.us", + jwt_signing_key="test-secret", + ) + + 
mock_credential = MagicMock() + with patch( + "azure.identity.aio.OnBehalfOfCredential", return_value=mock_credential + ) as mock_class: + provider.create_obo_credential(user_assertion="user-token") + + call_kwargs = mock_class.call_args[1] + assert call_kwargs["authority"] == "https://login.microsoftonline.us" + + def test_tenant_and_authority_stored_as_attributes(self): + """Test that tenant_id and base_authority are stored for OBO credential creation.""" + provider = AzureProvider( + client_id="test-client-id", + client_secret="test-client-secret", + tenant_id="my-tenant", + base_url="https://myserver.com", + required_scopes=["read"], + base_authority="login.microsoftonline.us", + jwt_signing_key="test-secret", + ) + + assert provider._tenant_id == "my-tenant" + assert provider._base_authority == "login.microsoftonline.us" + + def test_entra_obo_token_is_importable(self): + """Test that EntraOBOToken can be imported.""" + from fastmcp.server.auth.providers.azure import EntraOBOToken + + assert EntraOBOToken is not None + + def test_entra_obo_token_creates_dependency(self): + """Test that EntraOBOToken creates a dependency with scopes.""" + from fastmcp.server.auth.providers.azure import EntraOBOToken, _EntraOBOToken + + dep = EntraOBOToken(["https://graph.microsoft.com/User.Read"]) + assert isinstance(dep, _EntraOBOToken) + assert dep.scopes == ["https://graph.microsoft.com/User.Read"] + + def test_entra_obo_token_is_dependency_instance(self): + """Test that EntraOBOToken is a Dependency instance.""" + try: + from docket.dependencies import Dependency + except ImportError: + from fastmcp._vendor.docket_di import Dependency + + from fastmcp.server.auth.providers.azure import _EntraOBOToken + + dep = _EntraOBOToken(["scope"]) + assert isinstance(dep, Dependency) diff --git a/tests/server/auth/providers/test_discord.py b/tests/server/auth/providers/test_discord.py index 8d79265e6b..509eb08260 100644 --- a/tests/server/auth/providers/test_discord.py +++ 
b/tests/server/auth/providers/test_discord.py @@ -1,12 +1,21 @@ """Tests for Discord OAuth provider.""" +import pytest +from key_value.aio.stores.memory import MemoryStore + from fastmcp.server.auth.providers.discord import DiscordProvider +@pytest.fixture +def memory_storage() -> MemoryStore: + """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.""" + return MemoryStore() + + class TestDiscordProvider: """Test Discord OAuth provider functionality.""" - def test_init_with_explicit_params(self): + def test_init_with_explicit_params(self, memory_storage: MemoryStore): """Test DiscordProvider initialization with explicit parameters.""" provider = DiscordProvider( client_id="env_client_id", @@ -14,31 +23,34 @@ def test_init_with_explicit_params(self): base_url="https://myserver.com", required_scopes=["email", "identify"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider._upstream_client_id == "env_client_id" assert provider._upstream_client_secret.get_secret_value() == "GOCSPX-test123" assert str(provider.base_url) == "https://myserver.com/" - def test_init_defaults(self): + def test_init_defaults(self, memory_storage: MemoryStore): """Test that default values are applied correctly.""" provider = DiscordProvider( client_id="env_client_id", client_secret="GOCSPX-test123", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check defaults assert provider._redirect_path == "/auth/callback" - def test_oauth_endpoints_configured_correctly(self): + def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore): """Test that OAuth endpoints are configured correctly.""" provider = DiscordProvider( client_id="env_client_id", client_secret="GOCSPX-test123", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check that endpoints use Discord's OAuth2 endpoints @@ -52,7 +64,7 @@ def 
test_oauth_endpoints_configured_correctly(self): # Discord provider doesn't currently set a revocation endpoint assert provider._upstream_revocation_endpoint is None - def test_discord_specific_scopes(self): + def test_discord_specific_scopes(self, memory_storage: MemoryStore): """Test handling of Discord-specific scope formats.""" # Just test that the provider accepts Discord-specific scopes without error provider = DiscordProvider( @@ -64,6 +76,7 @@ def test_discord_specific_scopes(self): "email", ], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Provider should initialize successfully with these scopes diff --git a/tests/server/auth/providers/test_github.py b/tests/server/auth/providers/test_github.py index e2fcdaa256..fe2bbf0311 100644 --- a/tests/server/auth/providers/test_github.py +++ b/tests/server/auth/providers/test_github.py @@ -2,16 +2,25 @@ from unittest.mock import MagicMock, patch +import pytest +from key_value.aio.stores.memory import MemoryStore + from fastmcp.server.auth.providers.github import ( GitHubProvider, GitHubTokenVerifier, ) +@pytest.fixture +def memory_storage() -> MemoryStore: + """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.""" + return MemoryStore() + + class TestGitHubProvider: """Test GitHubProvider initialization.""" - def test_init_with_explicit_params(self): + def test_init_with_explicit_params(self, memory_storage: MemoryStore): """Test initialization with explicit parameters.""" provider = GitHubProvider( client_id="test_client", @@ -21,6 +30,7 @@ def test_init_with_explicit_params(self): required_scopes=["user", "repo"], timeout_seconds=30, jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check that the provider was initialized correctly @@ -31,13 +41,14 @@ def test_init_with_explicit_params(self): ) # URLs get normalized with trailing slash assert provider._redirect_path == "/custom/callback" - def test_init_defaults(self): + def test_init_defaults(self, 
memory_storage: MemoryStore): """Test that default values are applied correctly.""" provider = GitHubProvider( client_id="test_client", client_secret="test_secret", base_url="https://example.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check defaults @@ -49,7 +60,7 @@ def test_init_defaults(self): class TestGitHubTokenVerifier: """Test GitHubTokenVerifier.""" - def test_init_with_custom_scopes(self): + def test_init_with_custom_scopes(self, memory_storage: MemoryStore): """Test initialization with custom required scopes.""" verifier = GitHubTokenVerifier( required_scopes=["user", "repo"], @@ -59,7 +70,7 @@ def test_init_with_custom_scopes(self): assert verifier.required_scopes == ["user", "repo"] assert verifier.timeout_seconds == 30 - def test_init_defaults(self): + def test_init_defaults(self, memory_storage: MemoryStore): """Test initialization with defaults.""" verifier = GitHubTokenVerifier() diff --git a/tests/server/auth/providers/test_google.py b/tests/server/auth/providers/test_google.py index d578c70566..0f6bd6c895 100644 --- a/tests/server/auth/providers/test_google.py +++ b/tests/server/auth/providers/test_google.py @@ -1,12 +1,21 @@ """Tests for Google OAuth provider.""" +import pytest +from key_value.aio.stores.memory import MemoryStore + from fastmcp.server.auth.providers.google import GoogleProvider +@pytest.fixture +def memory_storage() -> MemoryStore: + """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.""" + return MemoryStore() + + class TestGoogleProvider: """Test Google OAuth provider functionality.""" - def test_init_with_explicit_params(self): + def test_init_with_explicit_params(self, memory_storage: MemoryStore): """Test GoogleProvider initialization with explicit parameters.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", @@ -14,32 +23,35 @@ def test_init_with_explicit_params(self): base_url="https://myserver.com", required_scopes=["openid", "email", 
"profile"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider._upstream_client_id == "123456789.apps.googleusercontent.com" assert provider._upstream_client_secret.get_secret_value() == "GOCSPX-test123" assert str(provider.base_url) == "https://myserver.com/" - def test_init_defaults(self): + def test_init_defaults(self, memory_storage: MemoryStore): """Test that default values are applied correctly.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", client_secret="GOCSPX-test123", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check defaults assert provider._redirect_path == "/auth/callback" # Google provider has ["openid"] as default but we can't easily verify without accessing internals - def test_oauth_endpoints_configured_correctly(self): + def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore): """Test that OAuth endpoints are configured correctly.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", client_secret="GOCSPX-test123", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check that endpoints use Google's OAuth2 endpoints @@ -53,7 +65,7 @@ def test_oauth_endpoints_configured_correctly(self): # Google provider doesn't currently set a revocation endpoint assert provider._upstream_revocation_endpoint is None - def test_google_specific_scopes(self): + def test_google_specific_scopes(self, memory_storage: MemoryStore): """Test handling of Google-specific scope formats.""" # Just test that the provider accepts Google-specific scopes without error provider = GoogleProvider( @@ -66,18 +78,20 @@ def test_google_specific_scopes(self): "https://www.googleapis.com/auth/userinfo.profile", ], jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Provider should initialize successfully with these scopes assert provider is not None - def 
test_extra_authorize_params_defaults(self): + def test_extra_authorize_params_defaults(self, memory_storage: MemoryStore): """Test that Google-specific defaults are set for refresh token support.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", client_secret="GOCSPX-test123", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Should have Google-specific defaults for refresh token support @@ -86,7 +100,9 @@ def test_extra_authorize_params_defaults(self): "prompt": "consent", } - def test_extra_authorize_params_override_defaults(self): + def test_extra_authorize_params_override_defaults( + self, memory_storage: MemoryStore + ): """Test that user can override default extra authorize params.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", @@ -94,6 +110,7 @@ def test_extra_authorize_params_override_defaults(self): base_url="https://myserver.com", jwt_signing_key="test-secret", extra_authorize_params={"prompt": "select_account"}, + client_storage=memory_storage, ) # User override should replace the default @@ -101,7 +118,7 @@ def test_extra_authorize_params_override_defaults(self): # But other defaults should remain assert provider._extra_authorize_params["access_type"] == "offline" - def test_extra_authorize_params_add_new_params(self): + def test_extra_authorize_params_add_new_params(self, memory_storage: MemoryStore): """Test that user can add additional authorize params.""" provider = GoogleProvider( client_id="123456789.apps.googleusercontent.com", @@ -109,6 +126,7 @@ def test_extra_authorize_params_add_new_params(self): base_url="https://myserver.com", jwt_signing_key="test-secret", extra_authorize_params={"login_hint": "user@example.com"}, + client_storage=memory_storage, ) # New param should be added diff --git a/tests/server/auth/providers/test_workos.py b/tests/server/auth/providers/test_workos.py index 69ee180124..594f2e5b5b 100644 --- 
a/tests/server/auth/providers/test_workos.py +++ b/tests/server/auth/providers/test_workos.py @@ -4,6 +4,7 @@ import httpx import pytest +from key_value.aio.stores.memory import MemoryStore from fastmcp import Client, FastMCP from fastmcp.client.transports import StreamableHttpTransport @@ -11,10 +12,16 @@ from fastmcp.utilities.tests import HeadlessOAuth, run_server_async +@pytest.fixture +def memory_storage() -> MemoryStore: + """Provide a MemoryStore for tests to avoid SQLite initialization on Windows.""" + return MemoryStore() + + class TestWorkOSProvider: """Test WorkOS OAuth provider functionality.""" - def test_init_with_explicit_params(self): + def test_init_with_explicit_params(self, memory_storage: MemoryStore): """Test WorkOSProvider initialization with explicit parameters.""" provider = WorkOSProvider( client_id="client_test123", @@ -23,13 +30,14 @@ def test_init_with_explicit_params(self): base_url="https://myserver.com", required_scopes=["openid", "profile"], jwt_signing_key="test-secret", + client_storage=memory_storage, ) assert provider._upstream_client_id == "client_test123" assert provider._upstream_client_secret.get_secret_value() == "secret_test456" assert str(provider.base_url) == "https://myserver.com/" - def test_authkit_domain_https_prefix_handling(self): + def test_authkit_domain_https_prefix_handling(self, memory_storage: MemoryStore): """Test that authkit_domain handles missing https:// prefix.""" # Without https:// - should add it provider1 = WorkOSProvider( @@ -38,6 +46,7 @@ def test_authkit_domain_https_prefix_handling(self): authkit_domain="test.authkit.app", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) parsed = urlparse(provider1._upstream_authorization_endpoint) assert parsed.scheme == "https" @@ -51,6 +60,7 @@ def test_authkit_domain_https_prefix_handling(self): authkit_domain="https://test.authkit.app", base_url="https://myserver.com", jwt_signing_key="test-secret", + 
client_storage=memory_storage, ) parsed = urlparse(provider2._upstream_authorization_endpoint) assert parsed.scheme == "https" @@ -64,13 +74,14 @@ def test_authkit_domain_https_prefix_handling(self): authkit_domain="http://localhost:8080", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) parsed = urlparse(provider3._upstream_authorization_endpoint) assert parsed.scheme == "http" assert parsed.netloc == "localhost:8080" assert parsed.path == "/oauth2/authorize" - def test_init_defaults(self): + def test_init_defaults(self, memory_storage: MemoryStore): """Test that default values are applied correctly.""" provider = WorkOSProvider( client_id="test_client", @@ -78,13 +89,14 @@ def test_init_defaults(self): authkit_domain="https://test.authkit.app", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check defaults assert provider._redirect_path == "/auth/callback" # WorkOS provider has no default scopes but we can't easily verify without accessing internals - def test_oauth_endpoints_configured_correctly(self): + def test_oauth_endpoints_configured_correctly(self, memory_storage: MemoryStore): """Test that OAuth endpoints are configured correctly.""" provider = WorkOSProvider( client_id="test_client", @@ -92,6 +104,7 @@ def test_oauth_endpoints_configured_correctly(self): authkit_domain="https://test.authkit.app", base_url="https://myserver.com", jwt_signing_key="test-secret", + client_storage=memory_storage, ) # Check that endpoints use the authkit domain @@ -135,7 +148,9 @@ def client_with_headless_oauth(mcp_server_url: str) -> Client: class TestAuthKitProvider: - async def test_unauthorized_access(self, mcp_server_url: str): + async def test_unauthorized_access( + self, memory_storage: MemoryStore, mcp_server_url: str + ): with pytest.raises(httpx.HTTPStatusError) as exc_info: async with Client(mcp_server_url) as client: tools = await client.list_tools() # noqa: F841 
diff --git a/tests/server/auth/test_authorization.py b/tests/server/auth/test_authorization.py index 6bab4cecdf..4bd0dff9ab 100644 --- a/tests/server/auth/test_authorization.py +++ b/tests/server/auth/test_authorization.py @@ -120,31 +120,31 @@ def test_allows_access_when_tag_present_with_scope(self): class TestRunAuthChecks: - def test_single_check_passes(self): + async def test_single_check_passes(self): ctx = AuthContext(token=make_token(scopes=["test"]), component=make_tool()) - assert run_auth_checks(require_scopes("test"), ctx) is True + assert await run_auth_checks(require_scopes("test"), ctx) is True - def test_single_check_fails(self): + async def test_single_check_fails(self): ctx = AuthContext(token=None, component=make_tool()) - assert run_auth_checks(require_scopes("test"), ctx) is False + assert await run_auth_checks(require_scopes("test"), ctx) is False - def test_multiple_checks_all_pass(self): + async def test_multiple_checks_all_pass(self): token = make_token(scopes=["test", "admin"]) ctx = AuthContext(token=token, component=make_tool()) checks = [require_scopes("test"), require_scopes("admin")] - assert run_auth_checks(checks, ctx) is True + assert await run_auth_checks(checks, ctx) is True - def test_multiple_checks_one_fails(self): + async def test_multiple_checks_one_fails(self): token = make_token(scopes=["read"]) ctx = AuthContext(token=token, component=make_tool()) checks = [require_scopes("read"), require_scopes("admin")] - assert run_auth_checks(checks, ctx) is False + assert await run_auth_checks(checks, ctx) is False - def test_empty_list_passes(self): + async def test_empty_list_passes(self): ctx = AuthContext(token=None, component=make_tool()) - assert run_auth_checks([], ctx) is True + assert await run_auth_checks([], ctx) is True - def test_custom_lambda_check(self): + async def test_custom_lambda_check(self): token = make_token() token.claims = {"level": 5} ctx = AuthContext(token=token, component=make_tool()) @@ -152,9 +152,9 @@ 
def test_custom_lambda_check(self): def check(ctx: AuthContext) -> bool: return ctx.token is not None and ctx.token.claims.get("level", 0) >= 3 - assert run_auth_checks(check, ctx) is True + assert await run_auth_checks(check, ctx) is True - def test_authorization_error_propagates(self): + async def test_authorization_error_propagates(self): """AuthorizationError from auth check should propagate with custom message.""" from fastmcp.exceptions import AuthorizationError @@ -163,9 +163,9 @@ def custom_auth_check(ctx: AuthContext) -> bool: ctx = AuthContext(token=make_token(), component=make_tool()) with pytest.raises(AuthorizationError, match="Custom denial reason"): - run_auth_checks(custom_auth_check, ctx) + await run_auth_checks(custom_auth_check, ctx) - def test_generic_exception_is_masked(self): + async def test_generic_exception_is_masked(self): """Generic exceptions from auth checks should be masked (return False).""" def buggy_auth_check(ctx: AuthContext) -> bool: @@ -173,9 +173,9 @@ def buggy_auth_check(ctx: AuthContext) -> bool: ctx = AuthContext(token=make_token(), component=make_tool()) # Should return False, not raise the ValueError - assert run_auth_checks(buggy_auth_check, ctx) is False + assert await run_auth_checks(buggy_auth_check, ctx) is False - def test_authorization_error_stops_chain(self): + async def test_authorization_error_stops_chain(self): """AuthorizationError should stop the check chain and propagate.""" from fastmcp.exceptions import AuthorizationError @@ -195,11 +195,62 @@ def check_3(ctx: AuthContext) -> bool: ctx = AuthContext(token=make_token(), component=make_tool()) with pytest.raises(AuthorizationError, match="Explicit denial"): - run_auth_checks([check_1, check_2, check_3], ctx) + await run_auth_checks([check_1, check_2, check_3], ctx) # Check 3 should not be called assert call_order == [1, 2] + async def test_async_check_passes(self): + """Async auth check functions should be awaited.""" + + async def async_check(ctx: 
AuthContext) -> bool: + return ctx.token is not None + + ctx = AuthContext(token=make_token(), component=make_tool()) + assert await run_auth_checks(async_check, ctx) is True + + async def test_async_check_fails(self): + """Async auth check that returns False should deny access.""" + + async def async_check(ctx: AuthContext) -> bool: + return False + + ctx = AuthContext(token=make_token(), component=make_tool()) + assert await run_auth_checks(async_check, ctx) is False + + async def test_mixed_sync_and_async_checks(self): + """A mix of sync and async checks should all be evaluated.""" + + def sync_check(ctx: AuthContext) -> bool: + return True + + async def async_check(ctx: AuthContext) -> bool: + return ctx.token is not None + + ctx = AuthContext(token=make_token(scopes=["test"]), component=make_tool()) + checks = [sync_check, async_check, require_scopes("test")] + assert await run_auth_checks(checks, ctx) is True + + async def test_async_check_exception_is_masked(self): + """Async checks that raise non-AuthorizationError should be masked.""" + + async def buggy_async_check(ctx: AuthContext) -> bool: + raise ValueError("async error") + + ctx = AuthContext(token=make_token(), component=make_tool()) + assert await run_auth_checks(buggy_async_check, ctx) is False + + async def test_async_check_authorization_error_propagates(self): + """Async checks that raise AuthorizationError should propagate.""" + from fastmcp.exceptions import AuthorizationError + + async def async_denial(ctx: AuthContext) -> bool: + raise AuthorizationError("Async denial") + + ctx = AuthContext(token=make_token(), component=make_tool()) + with pytest.raises(AuthorizationError, match="Async denial"): + await run_auth_checks(async_denial, ctx) + # ============================================================================= # Tests for tool-level auth with FastMCP @@ -454,6 +505,91 @@ def protected_tool() -> str: auth_context_var.reset(tok) +# 
============================================================================= +# Integration tests with async auth checks +# ============================================================================= + + +class TestAsyncAuthIntegration: + async def test_async_auth_check_filters_tool_listing(self): + """Async auth checks should work for filtering tool lists.""" + mcp = FastMCP() + + async def check_claims(ctx: AuthContext) -> bool: + return ctx.token is not None and ctx.token.claims.get("role") == "admin" + + @mcp.tool(auth=check_claims) + def admin_tool() -> str: + return "admin" + + @mcp.tool + def public_tool() -> str: + return "public" + + # Without token, only public tool visible + tools = await mcp.list_tools() + assert len(tools) == 1 + assert tools[0].name == "public_tool" + + # With correct claims, both visible + token = make_token() + token.claims = {"role": "admin"} + tok = set_token(token) + try: + tools = await mcp.list_tools() + assert len(tools) == 2 + finally: + auth_context_var.reset(tok) + + async def test_async_auth_check_on_tool_call(self): + """Async auth checks should work for tool execution via client.""" + mcp = FastMCP() + + async def check_claims(ctx: AuthContext) -> bool: + return ctx.token is not None and ctx.token.claims.get("role") == "admin" + + @mcp.tool(auth=check_claims) + def admin_tool() -> str: + return "secret" + + token = make_token() + token.claims = {"role": "admin"} + tok = set_token(token) + try: + async with Client(mcp) as client: + result = await client.call_tool("admin_tool", {}) + assert result.content[0].text == "secret" + finally: + auth_context_var.reset(tok) + + async def test_async_auth_middleware(self): + """Async auth checks should work with AuthMiddleware.""" + + async def async_scope_check(ctx: AuthContext) -> bool: + return ctx.token is not None and "api" in ctx.token.scopes + + mcp = FastMCP(middleware=[AuthMiddleware(auth=async_scope_check)]) + + @mcp.tool + def api_tool() -> str: + return "api" + + # 
Without token, tool is hidden + result = await mcp._list_tools_mcp(__import__("mcp").types.ListToolsRequest()) + assert len(result.tools) == 0 + + # With token containing "api" scope, tool is visible + token = make_token(scopes=["api"]) + tok = set_token(token) + try: + result = await mcp._list_tools_mcp( + __import__("mcp").types.ListToolsRequest() + ) + assert len(result.tools) == 1 + finally: + auth_context_var.reset(tok) + + # ============================================================================= # Tests for transformed tools preserving auth # ============================================================================= diff --git a/tests/server/middleware/test_caching.py b/tests/server/middleware/test_caching.py index d326e82369..52e5c90c97 100644 --- a/tests/server/middleware/test_caching.py +++ b/tests/server/middleware/test_caching.py @@ -288,7 +288,7 @@ async def caching_server( request: pytest.FixtureRequest, ): """Create a FastMCP server for caching tests.""" - mcp = FastMCP("CachingTestServer") + mcp = FastMCP("CachingTestServer", dereference_schemas=False) with tempfile.TemporaryDirectory(ignore_cleanup_errors=True) as temp_dir: disk_store: DiskStore = DiskStore(directory=temp_dir) diff --git a/tests/server/middleware/test_dereference.py b/tests/server/middleware/test_dereference.py new file mode 100644 index 0000000000..0ababd7de1 --- /dev/null +++ b/tests/server/middleware/test_dereference.py @@ -0,0 +1,136 @@ +"""Tests for DereferenceRefsMiddleware.""" + +from enum import Enum + +import pydantic + +from fastmcp import Client, FastMCP + + +class Color(Enum): + RED = "red" + GREEN = "green" + BLUE = "blue" + + +class PaintRequest(pydantic.BaseModel): + color: Color + opacity: float = 1.0 + + +class TestDereferenceRefsMiddleware: + """End-to-end tests for the dereference_schemas server kwarg.""" + + async def test_dereference_schemas_true_inlines_refs(self): + """With dereference_schemas=True (default), tool schemas have $ref inlined.""" + mcp = 
FastMCP("test", dereference_schemas=True) + + @mcp.tool + def paint(request: PaintRequest) -> str: + return "ok" + + async with Client(mcp) as client: + tools = await client.list_tools() + + schema = tools[0].inputSchema + # $defs should be removed β€” everything inlined + assert "$defs" not in schema + # The Color enum should be inlined into the request property + assert "$ref" not in str(schema) + + async def test_dereference_schemas_false_preserves_refs(self): + """With dereference_schemas=False, $ref and $defs are preserved.""" + mcp = FastMCP("test", dereference_schemas=False) + + @mcp.tool + def paint(request: PaintRequest) -> str: + return "ok" + + async with Client(mcp) as client: + tools = await client.list_tools() + + schema = tools[0].inputSchema + # $defs should still be present + assert "$defs" in schema + + async def test_default_is_true(self): + """Default behavior dereferences $ref.""" + mcp = FastMCP("test") + + @mcp.tool + def paint(request: PaintRequest) -> str: + return "ok" + + async with Client(mcp) as client: + tools = await client.list_tools() + + schema = tools[0].inputSchema + assert "$defs" not in schema + + async def test_does_not_mutate_original_tool(self): + """Middleware should not mutate the shared Tool object.""" + mcp = FastMCP("test", dereference_schemas=True) + + @mcp.tool + def paint(request: PaintRequest) -> str: + return "ok" + + # Get the original tool's parameters before middleware runs + original_tools = await mcp._local_provider._list_tools() + assert "$defs" in original_tools[0].parameters + + # List tools through the client (triggers middleware) + async with Client(mcp) as client: + await client.list_tools() + + # The original tool stored in the server should still have $defs + tools_after = await mcp._local_provider._list_tools() + assert "$defs" in tools_after[0].parameters + + async def test_output_schema_dereferenced(self): + """Middleware also dereferences output_schema when present.""" + mcp = FastMCP("test", 
dereference_schemas=True) + + @mcp.tool + def paint(request: PaintRequest) -> PaintRequest: + return request + + async with Client(mcp) as client: + tools = await client.list_tools() + + tool = tools[0] + # Both input and output schemas should be dereferenced + assert "$defs" not in tool.inputSchema + if tool.outputSchema is not None: + assert "$defs" not in tool.outputSchema + + async def test_resource_templates_dereferenced(self): + """Middleware dereferences resource template schemas.""" + mcp = FastMCP("test", dereference_schemas=True) + + @mcp.resource("paint://{color}") + def get_paint(color: Color) -> str: + return f"paint: {color}" + + async with Client(mcp) as client: + templates = await client.list_resource_templates() + + # Resource templates also get their schemas dereferenced + # (only if the template parameters have $ref) + assert len(templates) == 1 + + async def test_no_ref_schemas_unchanged(self): + """Tools without $ref should pass through unmodified.""" + mcp = FastMCP("test", dereference_schemas=True) + + @mcp.tool + def add(a: int, b: int) -> int: + return a + b + + async with Client(mcp) as client: + tools = await client.list_tools() + + schema = tools[0].inputSchema + # Simple schema should not have $defs regardless + assert "$defs" not in schema + assert schema["properties"]["a"]["type"] == "integer" diff --git a/tests/server/mount/test_advanced.py b/tests/server/mount/test_advanced.py index 84619b478a..b8434b3a31 100644 --- a/tests/server/mount/test_advanced.py +++ b/tests/server/mount/test_advanced.py @@ -53,8 +53,8 @@ def temp_tool() -> str: tools = await main_app.list_tools() assert any(t.name == "sub_temp_tool" for t in tools) - # Remove the tool from sub_app using public API - sub_app.remove_tool("temp_tool") + # Remove the tool from sub_app + sub_app.local_provider.remove_tool("temp_tool") # The tool should no longer be accessible tools = await main_app.list_tools() diff --git a/tests/server/mount/test_filtering.py 
b/tests/server/mount/test_filtering.py index 413cc0e602..376db4c75b 100644 --- a/tests/server/mount/test_filtering.py +++ b/tests/server/mount/test_filtering.py @@ -11,7 +11,8 @@ class TestParentTagFiltering: async def test_parent_include_tags_filters_mounted_tools(self): """Test that parent include_tags filters out non-matching mounted tools.""" - parent = FastMCP("Parent", include_tags={"allowed"}) + parent = FastMCP("Parent") + parent.enable(tags={"allowed"}, only=True) mounted = FastMCP("Mounted") @mounted.tool(tags={"allowed"}) @@ -38,7 +39,8 @@ def blocked_tool() -> str: async def test_parent_exclude_tags_filters_mounted_tools(self): """Test that parent exclude_tags filters out matching mounted tools.""" - parent = FastMCP("Parent", exclude_tags={"blocked"}) + parent = FastMCP("Parent") + parent.disable(tags={"blocked"}) mounted = FastMCP("Mounted") @mounted.tool(tags={"production"}) @@ -58,7 +60,8 @@ def blocked_tool() -> str: async def test_parent_filters_apply_to_mounted_resources(self): """Test that parent tag filters apply to mounted resources.""" - parent = FastMCP("Parent", include_tags={"allowed"}) + parent = FastMCP("Parent") + parent.enable(tags={"allowed"}, only=True) mounted = FastMCP("Mounted") @mounted.resource("resource://allowed", tags={"allowed"}) @@ -78,7 +81,8 @@ def blocked_resource() -> str: async def test_parent_filters_apply_to_mounted_prompts(self): """Test that parent tag filters apply to mounted prompts.""" - parent = FastMCP("Parent", exclude_tags={"blocked"}) + parent = FastMCP("Parent") + parent.disable(tags={"blocked"}) mounted = FastMCP("Mounted") @mounted.prompt(tags={"allowed"}) diff --git a/tests/server/providers/local_provider_tools/test_tags.py b/tests/server/providers/local_provider_tools/test_tags.py index 019ed4c07a..fb32e3515b 100644 --- a/tests/server/providers/local_provider_tools/test_tags.py +++ b/tests/server/providers/local_provider_tools/test_tags.py @@ -40,7 +40,7 @@ class PersonDataclass: class TestToolTags: 
def create_server(self, include_tags=None, exclude_tags=None): - mcp = FastMCP(include_tags=include_tags, exclude_tags=exclude_tags) + mcp = FastMCP() @mcp.tool(tags={"a", "b"}) def tool_1() -> int: @@ -50,6 +50,11 @@ def tool_1() -> int: def tool_2() -> int: return 2 + if include_tags: + mcp.enable(tags=include_tags, only=True) + if exclude_tags: + mcp.disable(tags=exclude_tags) + return mcp async def test_include_tags_all_tools(self): diff --git a/tests/server/providers/openapi/test_comprehensive.py b/tests/server/providers/openapi/test_comprehensive.py index f55786e65d..a21a764db4 100644 --- a/tests/server/providers/openapi/test_comprehensive.py +++ b/tests/server/providers/openapi/test_comprehensive.py @@ -763,3 +763,198 @@ async def test_timeout_error_produces_useful_message( error_message = str(exc_info.value) assert "timed out" in error_message assert "ReadTimeout" in error_message + + +class TestOpenAPIPostEdgeCases: + """Tests for POST request edge cases that could cause unhandled errors.""" + + @pytest.fixture + def post_spec_with_empty_content_schema(self): + """OpenAPI spec where a POST endpoint has an empty content_schema.""" + return { + "openapi": "3.0.0", + "info": {"title": "Test API", "version": "1.0.0"}, + "servers": [{"url": "https://api.example.com"}], + "paths": { + "/items": { + "post": { + "operationId": "create_item", + "summary": "Create an item", + "requestBody": { + "required": True, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "value": {"type": "integer"}, + }, + "required": ["name"], + } + } + }, + }, + "responses": { + "201": { + "description": "Created", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + }, + } + } + }, + } + }, + } + }, + "/items/{item_id}": { + "post": { + "operationId": "update_item", + "summary": "Update an item", + "parameters": [ + { + 
"name": "item_id", + "in": "path", + "required": True, + "schema": {"type": "integer"}, + } + ], + "requestBody": { + "required": True, + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "value": {"type": "integer"}, + }, + } + } + }, + }, + "responses": { + "200": { + "description": "Updated", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + }, + } + } + }, + } + }, + } + }, + }, + } + + async def test_post_with_body_params(self, post_spec_with_empty_content_schema): + """POST with body parameters should build the request correctly.""" + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + mock_response = Mock(spec=Response) + mock_response.status_code = 201 + mock_response.json.return_value = {"id": 1, "name": "Test"} + mock_response.raise_for_status = Mock() + mock_client.send = AsyncMock(return_value=mock_response) + + server = create_openapi_server( + openapi_spec=post_spec_with_empty_content_schema, + client=mock_client, + ) + + async with Client(server) as mcp_client: + result = await mcp_client.call_tool( + "create_item", {"name": "Test", "value": 42} + ) + + mock_client.send.assert_called_once() + request = mock_client.send.call_args[0][0] + assert request.method == "POST" + body_data = json.loads(request.content) + assert body_data["name"] == "Test" + assert body_data["value"] == 42 + assert result is not None + + async def test_post_with_path_params_and_body( + self, post_spec_with_empty_content_schema + ): + """POST with both path parameters and body should route args correctly.""" + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + mock_response = Mock(spec=Response) + mock_response.status_code = 200 + 
mock_response.json.return_value = {"id": 5, "name": "Updated"} + mock_response.raise_for_status = Mock() + mock_client.send = AsyncMock(return_value=mock_response) + + server = create_openapi_server( + openapi_spec=post_spec_with_empty_content_schema, + client=mock_client, + ) + + async with Client(server) as mcp_client: + result = await mcp_client.call_tool( + "update_item", + {"item_id": 5, "name": "Updated", "value": 99}, + ) + + mock_client.send.assert_called_once() + request = mock_client.send.call_args[0][0] + assert request.method == "POST" + assert "/items/5" in str(request.url) + body_data = json.loads(request.content) + assert body_data["name"] == "Updated" + assert body_data["value"] == 99 + assert "item_id" not in body_data + assert result is not None + + async def test_unexpected_error_in_request_building_gives_useful_message(self): + """Unexpected exceptions during request building should produce useful errors.""" + from fastmcp.server.providers.openapi.components import OpenAPITool + from fastmcp.utilities.openapi.director import RequestDirector + from fastmcp.utilities.openapi.models import HTTPRoute + + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + route = HTTPRoute( + path="/test", + method="POST", + operation_id="test_op", + parameters=[], + responses={}, + response_schemas={}, + ) + + mock_director = Mock(spec=RequestDirector) + mock_director.build.side_effect = KeyError("missing_param") + + tool = OpenAPITool( + client=mock_client, + route=route, + director=mock_director, + name="test_tool", + description="test", + parameters={}, + ) + + with pytest.raises(ValueError, match="Error building request for POST /test"): + await tool.run({"some_arg": "value"}) diff --git a/tests/server/providers/openapi/test_openapi_features.py b/tests/server/providers/openapi/test_openapi_features.py index f55a1e038b..ef4aeb002b 100644 --- 
a/tests/server/providers/openapi/test_openapi_features.py +++ b/tests/server/providers/openapi/test_openapi_features.py @@ -1,7 +1,10 @@ """Tests for OpenAPI feature support in OpenAPIProvider.""" +from unittest.mock import AsyncMock, Mock + import httpx import pytest +from httpx import Response from fastmcp import FastMCP from fastmcp.client import Client @@ -769,3 +772,213 @@ async def test_resource_mime_type_without_schema(self): resources = await mcp_client.list_resources() assert len(resources) == 1 assert resources[0].mimeType == "text/plain" + + +class TestValidateOutput: + """Tests for the validate_output option on OpenAPIProvider.""" + + @pytest.fixture + def spec_with_output_schema(self): + return { + "openapi": "3.0.0", + "info": {"title": "Test API", "version": "1.0.0"}, + "servers": [{"url": "https://api.example.com"}], + "paths": { + "/users/{id}": { + "get": { + "operationId": "get_user", + "summary": "Get a user", + "parameters": [ + { + "name": "id", + "in": "path", + "required": True, + "schema": {"type": "integer"}, + } + ], + "responses": { + "200": { + "description": "A user", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "name": {"type": "string"}, + "email": {"type": "string"}, + }, + "required": ["id", "name"], + } + } + }, + } + }, + } + }, + "/items": { + "get": { + "operationId": "list_items", + "summary": "List items", + "responses": { + "200": { + "description": "An array of items", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"} + }, + }, + } + } + }, + } + }, + } + }, + }, + } + + async def test_validate_output_true_preserves_extracted_schema( + self, spec_with_output_schema + ): + """Default validate_output=True uses the real extracted schema.""" + async with httpx.AsyncClient(base_url="https://api.example.com") as client: + provider = OpenAPIProvider( + 
openapi_spec=spec_with_output_schema, + client=client, + ) + + tool = provider._tools["get_user"] + assert tool.output_schema is not None + assert tool.output_schema.get("type") == "object" + assert "properties" in tool.output_schema + assert "id" in tool.output_schema["properties"] + + async def test_validate_output_false_uses_permissive_schema( + self, spec_with_output_schema + ): + """validate_output=False replaces the schema with a permissive one.""" + async with httpx.AsyncClient(base_url="https://api.example.com") as client: + provider = OpenAPIProvider( + openapi_spec=spec_with_output_schema, + client=client, + validate_output=False, + ) + + tool = provider._tools["get_user"] + assert tool.output_schema is not None + assert tool.output_schema == { + "type": "object", + "additionalProperties": True, + } + + async def test_validate_output_false_preserves_wrap_result_flag( + self, spec_with_output_schema + ): + """validate_output=False preserves x-fastmcp-wrap-result for array responses.""" + async with httpx.AsyncClient(base_url="https://api.example.com") as client: + provider = OpenAPIProvider( + openapi_spec=spec_with_output_schema, + client=client, + validate_output=False, + ) + + # The list_items endpoint returns an array, so the extracted schema + # would have had x-fastmcp-wrap-result=True + tool = provider._tools["list_items"] + assert tool.output_schema is not None + assert tool.output_schema.get("x-fastmcp-wrap-result") is True + assert tool.output_schema.get("additionalProperties") is True + + async def test_validate_output_false_allows_nonconforming_response( + self, spec_with_output_schema + ): + """With validate_output=False, responses that don't match the spec succeed.""" + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + # Return extra fields not in the schema + mock_response = Mock(spec=Response) + mock_response.status_code = 200 + mock_response.json.return_value = { 
+ "id": 1, + "name": "Alice", + "email": "alice@example.com", + "unexpected_field": "surprise", + "nested": {"deep": True}, + } + mock_response.raise_for_status = Mock() + mock_client.send = AsyncMock(return_value=mock_response) + + provider = OpenAPIProvider( + openapi_spec=spec_with_output_schema, + client=mock_client, + validate_output=False, + ) + mcp = FastMCP("Test") + mcp.add_provider(provider) + + async with Client(mcp) as mcp_client: + result = await mcp_client.call_tool("get_user", {"id": 1}) + assert result is not None + # Structured content should have the full response including extra fields + assert result.structured_content is not None + assert result.structured_content["unexpected_field"] == "surprise" + + async def test_validate_output_false_wraps_non_dict_response( + self, spec_with_output_schema + ): + """Non-dict responses are wrapped even when schema says object and validate_output=False.""" + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + # Backend returns an array even though schema says object + mock_response = Mock(spec=Response) + mock_response.status_code = 200 + mock_response.json.return_value = [{"id": 1}, {"id": 2}] + mock_response.raise_for_status = Mock() + mock_client.send = AsyncMock(return_value=mock_response) + + provider = OpenAPIProvider( + openapi_spec=spec_with_output_schema, + client=mock_client, + validate_output=False, + ) + mcp = FastMCP("Test") + mcp.add_provider(provider) + + async with Client(mcp) as mcp_client: + result = await mcp_client.call_tool("get_user", {"id": 1}) + assert result is not None + # Non-dict should be wrapped so structured_content is always a dict + assert result.structured_content is not None + assert isinstance(result.structured_content, dict) + assert result.structured_content["result"] == [{"id": 1}, {"id": 2}] + + async def test_from_openapi_threads_validate_output(self, spec_with_output_schema): + 
"""FastMCP.from_openapi() correctly passes validate_output to the provider.""" + mock_client = Mock(spec=httpx.AsyncClient) + mock_client.base_url = "https://api.example.com" + mock_client.headers = None + + server = FastMCP.from_openapi( + openapi_spec=spec_with_output_schema, + client=mock_client, + validate_output=False, + ) + + async with Client(server) as mcp_client: + tools = await mcp_client.list_tools() + get_user = next(t for t in tools if t.name == "get_user") + # With validate_output=False, the outputSchema should be permissive + assert get_user.outputSchema is not None + assert get_user.outputSchema.get("additionalProperties") is True + # Should NOT have specific properties from the original schema + assert "properties" not in get_user.outputSchema diff --git a/tests/server/providers/proxy/test_stateful_proxy_client.py b/tests/server/providers/proxy/test_stateful_proxy_client.py index 8ae0ce7133..79a280e724 100644 --- a/tests/server/providers/proxy/test_stateful_proxy_client.py +++ b/tests/server/providers/proxy/test_stateful_proxy_client.py @@ -1,15 +1,18 @@ import asyncio +from dataclasses import dataclass import pytest from anyio import create_task_group from mcp.types import LoggingLevel from fastmcp import Client, Context, FastMCP +from fastmcp.client.elicitation import ElicitResult from fastmcp.client.logging import LogMessage from fastmcp.client.transports import FastMCPTransport from fastmcp.exceptions import ToolError +from fastmcp.server.elicitation import AcceptedElicitation from fastmcp.server.providers.proxy import FastMCPProxy, StatefulProxyClient -from fastmcp.utilities.tests import find_available_port +from fastmcp.utilities.tests import find_available_port, run_server_async @pytest.fixture @@ -145,3 +148,54 @@ def tool_b() -> str: result_b = await client.call_tool("b_tool_b", {}) assert result_a.data == "a" assert result_b.data == "b" + + @pytest.mark.timeout(10) + async def test_stateful_proxy_elicitation_over_http(self): + 
"""Elicitation through a stateful proxy over HTTP must not hang. + + When StatefulProxyClient reuses a session, the receive-loop task + inherits a stale request_ctx ContextVar from the first request. + The streamable-HTTP transport uses related_request_id to route + server-initiated messages (like elicitation) back to the correct + HTTP response stream. A stale request_id routes to a closed + stream, causing the elicitation to hang forever. + + This test runs the proxy over HTTP (not in-process) so the + transport's related_request_id routing is exercised. + """ + + @dataclass + class Person: + name: str + + backend = FastMCP("backend") + + @backend.tool + async def ask_name(ctx: Context) -> str: + result = await ctx.elicit("What is your name?", response_type=Person) + if isinstance(result, AcceptedElicitation): + assert isinstance(result.data, Person) + return f"Hello, {result.data.name}!" + return "declined" + + stateful_client = StatefulProxyClient(backend) + proxy = FastMCPProxy( + client_factory=stateful_client.new_stateful, + name="proxy", + ) + + async def elicitation_handler(message, response_type, params, ctx): + return ElicitResult(action="accept", content=response_type(name="Alice")) + + # Run the proxy over HTTP so the transport uses + # related_request_id routing for server-initiated messages. + async with run_server_async(proxy) as proxy_url: + async with Client( + proxy_url, elicitation_handler=elicitation_handler + ) as client: + result1 = await client.call_tool("ask_name", {}) + assert result1.data == "Hello, Alice!" + # Second call reuses the stateful session β€” this is the + # one that would hang without the fix. + result2 = await client.call_tool("ask_name", {}) + assert result2.data == "Hello, Alice!" 
diff --git a/tests/server/providers/test_local_provider_prompts.py b/tests/server/providers/test_local_provider_prompts.py index 6b0400e647..ac0df86a35 100644 --- a/tests/server/providers/test_local_provider_prompts.py +++ b/tests/server/providers/test_local_provider_prompts.py @@ -415,7 +415,7 @@ def sample_prompt() -> str: class TestPromptTags: def create_server(self, include_tags=None, exclude_tags=None): - mcp = FastMCP(include_tags=include_tags, exclude_tags=exclude_tags) + mcp = FastMCP() @mcp.prompt(tags={"a", "b"}) def prompt_1() -> str: @@ -425,6 +425,11 @@ def prompt_1() -> str: def prompt_2() -> str: return "2" + if include_tags: + mcp.enable(tags=include_tags, only=True) + if exclude_tags: + mcp.disable(tags=exclude_tags) + return mcp async def test_include_tags_all_prompts(self): diff --git a/tests/server/providers/test_local_provider_resources.py b/tests/server/providers/test_local_provider_resources.py index 972b9358b0..4c8da4559d 100644 --- a/tests/server/providers/test_local_provider_resources.py +++ b/tests/server/providers/test_local_provider_resources.py @@ -676,7 +676,7 @@ def get_template_data(param: str) -> str: class TestResourceTags: def create_server(self, include_tags=None, exclude_tags=None): - mcp = FastMCP(include_tags=include_tags, exclude_tags=exclude_tags) + mcp = FastMCP() @mcp.resource("resource://1", tags={"a", "b"}) def resource_1() -> str: @@ -686,6 +686,11 @@ def resource_1() -> str: def resource_2() -> str: return "2" + if include_tags: + mcp.enable(tags=include_tags, only=True) + if exclude_tags: + mcp.disable(tags=exclude_tags) + return mcp async def test_include_tags_all_resources(self): @@ -823,7 +828,7 @@ def sample_resource() -> str: class TestResourceTemplatesTags: def create_server(self, include_tags=None, exclude_tags=None): - mcp = FastMCP(include_tags=include_tags, exclude_tags=exclude_tags) + mcp = FastMCP() @mcp.resource("resource://1/{param}", tags={"a", "b"}) def template_resource_1(param: str) -> str: @@ 
-833,6 +838,11 @@ def template_resource_1(param: str) -> str: def template_resource_2(param: str) -> str: return f"Template resource 2: {param}" + if include_tags: + mcp.enable(tags=include_tags, only=True) + if exclude_tags: + mcp.disable(tags=exclude_tags) + return mcp async def test_include_tags_all_resources(self): diff --git a/tests/server/sampling/test_prepare_tools.py b/tests/server/sampling/test_prepare_tools.py new file mode 100644 index 0000000000..c08a27ae10 --- /dev/null +++ b/tests/server/sampling/test_prepare_tools.py @@ -0,0 +1,111 @@ +"""Tests for prepare_tools helper function.""" + +import pytest + +from fastmcp.server.sampling.run import prepare_tools +from fastmcp.server.sampling.sampling_tool import SamplingTool +from fastmcp.tools.function_tool import FunctionTool +from fastmcp.tools.tool_transform import ArgTransform, TransformedTool + + +class TestPrepareTools: + """Tests for prepare_tools().""" + + def test_prepare_tools_with_none(self): + """Test that None returns None.""" + result = prepare_tools(None) + assert result is None + + def test_prepare_tools_with_sampling_tool(self): + """Test that SamplingTool instances pass through.""" + + def search(query: str) -> str: + return f"Results: {query}" + + sampling_tool = SamplingTool.from_function(search) + result = prepare_tools([sampling_tool]) + + assert result is not None + assert len(result) == 1 + assert result[0] is sampling_tool + + def test_prepare_tools_with_function(self): + """Test that plain functions are converted.""" + + def search(query: str) -> str: + """Search function.""" + return f"Results: {query}" + + result = prepare_tools([search]) + + assert result is not None + assert len(result) == 1 + assert isinstance(result[0], SamplingTool) + assert result[0].name == "search" + + def test_prepare_tools_with_function_tool(self): + """Test that FunctionTool instances are converted.""" + + def search(query: str) -> str: + """Search the web.""" + return f"Results: {query}" + + 
function_tool = FunctionTool.from_function(search) + result = prepare_tools([function_tool]) + + assert result is not None + assert len(result) == 1 + assert isinstance(result[0], SamplingTool) + assert result[0].name == "search" + assert result[0].description == "Search the web." + + def test_prepare_tools_with_transformed_tool(self): + """Test that TransformedTool instances are converted.""" + + def original(query: str) -> str: + """Original tool.""" + return f"Results: {query}" + + function_tool = FunctionTool.from_function(original) + transformed_tool = TransformedTool.from_tool( + function_tool, + name="search_v2", + transform_args={"query": ArgTransform(name="q")}, + ) + + result = prepare_tools([transformed_tool]) + + assert result is not None + assert len(result) == 1 + assert isinstance(result[0], SamplingTool) + assert result[0].name == "search_v2" + assert "q" in result[0].parameters.get("properties", {}) + + def test_prepare_tools_with_mixed_types(self): + """Test that mixed tool types are all converted.""" + + def plain_fn(x: int) -> int: + return x * 2 + + def fn_for_tool(y: int) -> int: + return y * 3 + + function_tool = FunctionTool.from_function(fn_for_tool) + sampling_tool = SamplingTool.from_function(lambda z: z * 4, name="lambda_tool") + + result = prepare_tools([plain_fn, function_tool, sampling_tool]) + + assert result is not None + assert len(result) == 3 + assert all(isinstance(t, SamplingTool) for t in result) + + def test_prepare_tools_with_invalid_type(self): + """Test that invalid types raise TypeError.""" + + with pytest.raises(TypeError, match="Expected SamplingTool, FunctionTool"): + prepare_tools(["not a tool"]) # type: ignore[arg-type] + + def test_prepare_tools_empty_list(self): + """Test that empty list returns None.""" + result = prepare_tools([]) + assert result is None diff --git a/tests/server/sampling/test_sampling_tool.py b/tests/server/sampling/test_sampling_tool.py index 8c2dba7a8e..a95b393acb 100644 --- 
a/tests/server/sampling/test_sampling_tool.py +++ b/tests/server/sampling/test_sampling_tool.py @@ -3,6 +3,8 @@ import pytest from fastmcp.server.sampling import SamplingTool +from fastmcp.tools.function_tool import FunctionTool +from fastmcp.tools.tool_transform import ArgTransform, TransformedTool class TestSamplingToolFromFunction: @@ -119,3 +121,172 @@ def search(query: str) -> str: assert sdk_tool.name == "search" assert sdk_tool.description == "Search the web." assert "query" in sdk_tool.inputSchema.get("properties", {}) + + +class TestSamplingToolFromCallableTool: + """Tests for SamplingTool.from_callable_tool().""" + + def test_from_function_tool(self): + """Test converting a FunctionTool to SamplingTool.""" + + def search(query: str) -> str: + """Search the web.""" + return f"Results for: {query}" + + function_tool = FunctionTool.from_function(search) + sampling_tool = SamplingTool.from_callable_tool(function_tool) + + assert sampling_tool.name == "search" + assert sampling_tool.description == "Search the web." 
+ assert "query" in sampling_tool.parameters.get("properties", {}) + # fn is now a wrapper that calls tool.run() for proper result processing + assert callable(sampling_tool.fn) + + def test_from_function_tool_with_overrides(self): + """Test converting FunctionTool with name/description overrides.""" + + def search(query: str) -> str: + """Search the web.""" + return f"Results for: {query}" + + function_tool = FunctionTool.from_function(search) + sampling_tool = SamplingTool.from_callable_tool( + function_tool, + name="web_search", + description="Search the internet", + ) + + assert sampling_tool.name == "web_search" + assert sampling_tool.description == "Search the internet" + + def test_from_transformed_tool(self): + """Test converting a TransformedTool to SamplingTool.""" + + def original(query: str, limit: int) -> str: + """Original tool.""" + return f"Results for: {query} (limit: {limit})" + + function_tool = FunctionTool.from_function(original) + transformed_tool = TransformedTool.from_tool( + function_tool, + name="search_transformed", + transform_args={"query": ArgTransform(name="q")}, + ) + + sampling_tool = SamplingTool.from_callable_tool(transformed_tool) + + assert sampling_tool.name == "search_transformed" + assert sampling_tool.description == "Original tool." 
+ # The transformed tool should have 'q' instead of 'query' + assert "q" in sampling_tool.parameters.get("properties", {}) + assert "limit" in sampling_tool.parameters.get("properties", {}) + + async def test_from_function_tool_execution(self): + """Test that converted FunctionTool executes correctly.""" + + def add(a: int, b: int) -> int: + """Add two numbers.""" + return a + b + + function_tool = FunctionTool.from_function(add) + sampling_tool = SamplingTool.from_callable_tool(function_tool) + + result = await sampling_tool.run({"a": 2, "b": 3}) + assert result == 5 + + async def test_from_transformed_tool_execution(self): + """Test that converted TransformedTool executes correctly.""" + + def multiply(x: int, y: int) -> int: + """Multiply two numbers.""" + return x * y + + function_tool = FunctionTool.from_function(multiply) + transformed_tool = TransformedTool.from_tool( + function_tool, + transform_args={"x": ArgTransform(name="a"), "y": ArgTransform(name="b")}, + ) + + sampling_tool = SamplingTool.from_callable_tool(transformed_tool) + + # Use the transformed parameter names + result = await sampling_tool.run({"a": 3, "b": 4}) + # Result should be unwrapped from ToolResult + assert result == 12 + + def test_from_invalid_tool_type(self): + """Test that from_callable_tool rejects non-tool objects.""" + + class NotATool: + pass + + with pytest.raises( + TypeError, + match="Expected FunctionTool or TransformedTool", + ): + SamplingTool.from_callable_tool(NotATool()) # type: ignore[arg-type] + + def test_from_plain_function_fails(self): + """Test that plain functions are rejected by from_callable_tool.""" + + def my_function(): + pass + + with pytest.raises(TypeError, match="Expected FunctionTool or TransformedTool"): + SamplingTool.from_callable_tool(my_function) # type: ignore[arg-type] + + async def test_from_function_tool_with_output_schema(self): + """Test that FunctionTool with output_schema is handled correctly.""" + + def search(query: str) -> dict: + 
"""Search for something.""" + return {"results": ["item1", "item2"], "count": 2} + + # Create FunctionTool with x-fastmcp-wrap-result + function_tool = FunctionTool.from_function( + search, + output_schema={ + "type": "object", + "properties": { + "results": {"type": "array"}, + "count": {"type": "integer"}, + }, + "x-fastmcp-wrap-result": True, + }, + ) + + sampling_tool = SamplingTool.from_callable_tool(function_tool) + + # Run the tool - should unwrap the {"result": {...}} wrapper + result = await sampling_tool.run({"query": "test"}) + + # Should get the unwrapped dict, not ToolResult + assert isinstance(result, dict) + assert result == {"results": ["item1", "item2"], "count": 2} + + async def test_from_function_tool_without_wrap_result(self): + """Test that FunctionTool without x-fastmcp-wrap-result is handled correctly.""" + + def get_data() -> dict: + """Get some data.""" + return {"status": "ok", "value": 42} + + # Create FunctionTool with output_schema but no wrap-result flag + function_tool = FunctionTool.from_function( + get_data, + output_schema={ + "type": "object", + "properties": { + "status": {"type": "string"}, + "value": {"type": "integer"}, + }, + }, + ) + + sampling_tool = SamplingTool.from_callable_tool(function_tool) + + # Run the tool - should return structured_content directly + result = await sampling_tool.run({}) + + assert isinstance(result, dict) + assert result == {"status": "ok", "value": 42} diff --git a/tests/server/tasks/test_context_background_task.py b/tests/server/tasks/test_context_background_task.py index b778a63b29..c7eb9e90c2 100644 --- a/tests/server/tasks/test_context_background_task.py +++ b/tests/server/tasks/test_context_background_task.py @@ -1,11 +1,28 @@ -"""Tests for Context background task support (SEP-1686).""" +"""Tests for Context background task support (SEP-1686). + +Tests Context API surface (unit) and background task elicitation (integration). 
+Integration tests use Client(mcp) with the real memory:// Docket backend β€” +no mocking of Redis, Docket, or session internals. +""" + +import asyncio +from typing import cast import pytest +from mcp import ServerSession from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.client.elicitation import ElicitResult +from fastmcp.server.auth import AccessToken from fastmcp.server.context import Context -from fastmcp.server.elicitation import AcceptedElicitation -from fastmcp.server.tasks.elicitation import elicit_for_task, handle_task_input +from fastmcp.server.dependencies import get_access_token +from fastmcp.server.elicitation import AcceptedElicitation, DeclinedElicitation +from fastmcp.server.tasks.elicitation import handle_task_input + +# ============================================================================= +# Unit tests: Context API surface (no Redis/Docket needed) +# ============================================================================= class TestContextBackgroundTaskSupport: @@ -30,7 +47,7 @@ def test_context_task_id_is_readonly(self): mcp = FastMCP("test") ctx = Context(mcp, task_id="test-task-123") with pytest.raises(AttributeError): - ctx.task_id = "new-id" # type: ignore[misc] + setattr(ctx, "task_id", "new-id") class TestContextSessionProperty: @@ -52,9 +69,10 @@ class MockSession: _fastmcp_state_prefix = "test-session" mock_session = MockSession() - ctx = Context(mcp, session=mock_session, task_id="test-task-123") # type: ignore[arg-type] + ctx = Context( + mcp, session=cast(ServerSession, mock_session), task_id="test-task-123" + ) - # In background task mode, should return the stored session assert ctx.session is mock_session def test_session_uses_stored_session_during_on_initialize(self): @@ -65,32 +83,75 @@ class MockSession: _fastmcp_state_prefix = "test-session" mock_session = MockSession() - # Simulating on_initialize: has session but not a background task - ctx = Context(mcp, session=mock_session) # type: 
ignore[arg-type] + ctx = Context(mcp, session=cast(ServerSession, mock_session)) - # Should return the stored session as fallback assert ctx.session is mock_session class TestContextElicitBackgroundTask: """Tests for Context.elicit() in background task mode.""" - @pytest.mark.asyncio async def test_elicit_raises_when_background_task_but_no_docket(self): """elicit() should raise when in background task mode but Docket unavailable.""" mcp = FastMCP("test") ctx = Context(mcp, task_id="test-task-123") - # Set up minimal session mock class MockSession: _fastmcp_state_prefix = "test-session" - ctx._session = MockSession() # type: ignore[assignment] + ctx._session = cast(ServerSession, MockSession()) with pytest.raises(RuntimeError, match="Docket"): await ctx.elicit("Need input", str) +class TestElicitFailFast: + """Tests for elicit_for_task fail-fast on notification push failure.""" + + async def test_elicit_returns_cancel_when_notification_push_fails(self): + """elicit_for_task should return cancel immediately when push_notification fails. + + If the client can't receive the input_required notification, waiting + for a response that will never come would block for up to 1 hour. + Instead, we return cancel immediately (fail-fast). + + This test patches ONLY push_notification β€” all other components + (Docket, Redis, session) are real via the memory:// backend. 
+ """ + from unittest.mock import patch + + from fastmcp.server.elicitation import CancelledElicitation + + mcp = FastMCP("failfast-test") + elicit_started = asyncio.Event() + captured: dict[str, object] = {} + + @mcp.tool(task=True) + async def failfast_tool(ctx: Context) -> str: + elicit_started.set() + result = await ctx.elicit("This notification will fail", str) + captured["result_type"] = type(result).__name__ + captured["is_cancelled"] = isinstance(result, CancelledElicitation) + return "done" + + # Patch push_notification BEFORE starting client so it's active + # when the tool runs in the Docket worker + with patch( + "fastmcp.server.tasks.notifications.push_notification", + side_effect=ConnectionError("Redis queue unavailable"), + ): + async with Client(mcp) as client: + task = await client.call_tool("failfast_tool", {}, task=True) + await asyncio.wait_for(elicit_started.wait(), timeout=5.0) + await task.wait(timeout=10.0) + result = await task.result() + assert result.data == "done" + + # The tool should have received CancelledElicitation (fail-fast) + assert captured["is_cancelled"] is True + assert captured["result_type"] == "CancelledElicitation" + + class TestContextDocumentation: """Tests to verify Context documentation and API surface.""" @@ -110,753 +171,272 @@ def test_session_has_docstring(self): assert "background task" in Context.session.fget.__doc__.lower() -class TestBackgroundTaskElicitationE2E: - """End-to-end tests for background task elicitation (SEP-1686). +# ============================================================================= +# Integration tests: Client(mcp) + memory:// Docket backend +# ============================================================================= + - These tests demonstrate the full flow: - 1. Client calls a tool with task=True (background execution) - 2. Tool uses ctx.elicit() to request user input - 3. Task status changes to "input_required" - 4. Client sends input via handle_task_input() - 5. 
Task resumes and completes with the elicited value +class TestBackgroundTaskIntegration: + """Integration tests for background task context using real Docket memory backend. - This simulates what a client would see when interacting with - a background task that needs user input. + These tests use Client(mcp) with the memory:// broker β€” no mocking. + The memory:// backend provides a fully functional in-memory Redis store + that Docket uses automatically when running tests. """ - async def test_elicit_for_task_stores_request_in_redis(self): - """Test that elicit_for_task stores the elicitation request in Redis. + async def test_report_progress_in_background_task(self): + """report_progress() should complete without error in a background task.""" + mcp = FastMCP("progress-test") + progress_reported = asyncio.Event() - This tests the Redis coordination layer that enables client interaction. - When a background task calls elicit(), the request is stored in Redis - so clients can retrieve it and respond. 
- """ - from unittest.mock import AsyncMock, MagicMock, patch + @mcp.tool(task=True) + async def progress_tool(ctx: Context) -> str: + await ctx.report_progress(0, 100, "Starting...") + await ctx.report_progress(50, 100, "Half done") + await ctx.report_progress(100, 100, "Complete") + progress_reported.set() + return "done" - from fastmcp.server.tasks.elicitation import ( - elicit_for_task, - ) + async with Client(mcp) as client: + task = await client.call_tool("progress_tool", {}, task=True) + await asyncio.wait_for(progress_reported.wait(), timeout=5.0) + await task.wait(timeout=5.0) + result = await task.result() + assert result.data == "done" - # Create mocks - mock_redis = AsyncMock() - mock_redis.set = AsyncMock() - mock_redis.get = AsyncMock(return_value=None) # No response yet - mock_redis.delete = AsyncMock() - - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session-id" - mock_session.send_notification = AsyncMock() - - # Call elicit_for_task with a short timeout to avoid blocking - with patch("fastmcp.server.tasks.elicitation.ELICIT_TTL_SECONDS", 1): - with patch("fastmcp.server.tasks.elicitation.asyncio.sleep", AsyncMock()): - # Make it return after first poll - mock_redis.get = AsyncMock( - return_value=b'{"action": "accept", "content": {"value": 42}}' - ) - - result = await elicit_for_task( - task_id="test-task-123", - session=mock_session, - message="Please provide a number", - schema={ - "type": "object", - "properties": {"value": {"type": "integer"}}, - }, - fastmcp=mock_fastmcp, - ) - - # Verify the result - assert result.action == "accept" - assert result.content == {"value": 42} - - # Verify Redis 
operations were called - assert mock_redis.set.call_count >= 2 # request + status - - async def test_handle_task_input_stores_response(self): - """Test that handle_task_input stores the response in Redis. - - This tests the client-side flow: when a client sends input via - tasks/sendInput, the response is stored in Redis for the waiting task. - """ - from unittest.mock import AsyncMock, MagicMock - - # Create mocks - mock_redis = AsyncMock() - mock_redis.get = AsyncMock(return_value=b"waiting") # Status is waiting - mock_redis.set = AsyncMock() - - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - # Call handle_task_input - success = await handle_task_input( - task_id="test-task-123", - session_id="test-session-id", - action="accept", - content={"value": 42}, - fastmcp=mock_fastmcp, - ) + async def test_context_wiring_in_background_task(self): + """Context should be properly wired with task_id and session_id.""" + mcp = FastMCP("wiring-test") + task_completed = asyncio.Event() + captured: dict[str, object] = {} - # Verify success - assert success is True + @mcp.tool(task=True) + async def verify_wiring(ctx: Context) -> str: + captured["task_id"] = ctx.task_id + captured["session_id"] = ctx.session_id + captured["is_background"] = ctx.is_background_task + task_completed.set() + return "ok" - # Verify Redis operations - assert mock_redis.set.call_count == 2 # response + status update + async with Client(mcp) as client: + task = await client.call_tool("verify_wiring", {}, task=True) + await asyncio.wait_for(task_completed.wait(), timeout=5.0) + await task.wait(timeout=5.0) + result = await task.result() + assert result.data == "ok" - async def test_handle_task_input_rejects_when_not_waiting(self): 
- """Test that handle_task_input rejects input when task isn't waiting. + assert captured["task_id"] is not None + assert captured["session_id"] is not None + assert captured["is_background"] is True - This verifies proper state management - clients can only send input - when a task is actually waiting for it. - """ - from unittest.mock import AsyncMock, MagicMock - - mock_redis = AsyncMock() - mock_redis.get = AsyncMock(return_value=None) # No waiting status - - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - success = await handle_task_input( - task_id="test-task-123", - session_id="test-session-id", - action="accept", - content={"value": 42}, - fastmcp=mock_fastmcp, - ) + async def test_elicit_accept_flow(self): + """E2E: tool elicits input, client accepts via elicitation_handler.""" + mcp = FastMCP("elicit-accept-test") - # Should fail because no task is waiting - assert success is False + @mcp.tool(task=True) + async def ask_name(ctx: Context) -> str: + result = await ctx.elicit("What is your name?", str) + if isinstance(result, AcceptedElicitation): + return f"Hello, {result.data}!" + return "No name provided" - async def test_elicit_for_task_sends_notification(self): - """Test that elicit_for_task sends input_required notification. + async def handler(message, response_type, params, ctx): + return ElicitResult(action="accept", content={"value": "Bob"}) - Per SEP-1686, the server should send notifications/tasks/updated - with status="input_required" when a task needs input. 
- """ - from unittest.mock import AsyncMock, MagicMock, patch + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("ask_name", {}, task=True) + await task.wait(timeout=10.0) + result = await task.result() + assert result.data == "Hello, Bob!" - mock_redis = AsyncMock() - mock_redis.set = AsyncMock() - mock_redis.get = AsyncMock( - return_value=b'{"action": "accept", "content": {"value": 1}}' - ) - mock_redis.delete = AsyncMock() - - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session" - mock_session.send_notification = AsyncMock() - - with patch("fastmcp.server.tasks.elicitation.asyncio.sleep", AsyncMock()): - await elicit_for_task( - task_id="my-task-id", - session=mock_session, - message="Enter value", - schema={"type": "object"}, - fastmcp=mock_fastmcp, - ) + async def test_elicit_decline_flow(self): + """E2E: tool elicits input, client declines via elicitation_handler.""" + mcp = FastMCP("elicit-decline-test") - # Verify notification was sent - mock_session.send_notification.assert_called_once() - notification = mock_session.send_notification.call_args[0][0] - assert notification.method == "notifications/tasks/updated" + @mcp.tool(task=True) + async def optional_input(ctx: Context) -> str: + result = await ctx.elicit("Want to provide a name?", str) + if isinstance(result, DeclinedElicitation): + return "User declined" + if isinstance(result, AcceptedElicitation): + return f"Got: {result.data}" + return "Cancelled" - async def test_elicit_for_task_timeout_returns_cancel(self): - """Test that elicit_for_task returns cancel on timeout. 
+ async def handler(message, response_type, params, ctx): + return ElicitResult(action="decline") - If no response is received within the TTL, the elicitation - should be treated as cancelled. - """ - from unittest.mock import AsyncMock, MagicMock, patch - - mock_redis = AsyncMock() - mock_redis.set = AsyncMock() - mock_redis.get = AsyncMock(return_value=None) # Never responds - mock_redis.delete = AsyncMock() - - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session" - mock_session.send_notification = AsyncMock() - - # Use very short TTL for test - with patch("fastmcp.server.tasks.elicitation.ELICIT_TTL_SECONDS", 0.1): - with patch( - "fastmcp.server.tasks.elicitation.asyncio.sleep", - AsyncMock(), - ): - result = await elicit_for_task( - task_id="timeout-task", - session=mock_session, - message="This will timeout", - schema={"type": "object"}, - fastmcp=mock_fastmcp, - ) - - # Should return cancel on timeout - assert result.action == "cancel" - assert result.content is None - - async def test_elicit_notification_includes_full_schema(self): - """Test that the notification includes the full JSON schema for complex types. - - This test demonstrates what the client sees when eliciting a Pydantic model. 
- The client receives a full JSON Schema that describes the expected input, - which they can use to: - - Render a dynamic form - - Validate user input before sending - - Show field descriptions to the user - - Example notification metadata for a UserInfo model: - ```json - { - "modelcontextprotocol.io/related-task": { - "taskId": "test-task", - "status": "input_required", - "statusMessage": "Please provide user info", - "elicitation": { - "requestId": "...", - "message": "Please provide user info", - "requestedSchema": { - "type": "object", - "properties": { - "name": {"type": "string", "title": "Name"}, - "age": {"type": "integer", "title": "Age"} - }, - "required": ["name", "age"], - "title": "UserInfo" - } - } - } - } - ``` - """ - from unittest.mock import AsyncMock, MagicMock, patch + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("optional_input", {}, task=True) + await task.wait(timeout=10.0) + result = await task.result() + assert result.data == "User declined" + async def test_elicit_with_pydantic_model(self): + """E2E: tool elicits structured Pydantic input via elicitation_handler.""" from pydantic import BaseModel class UserInfo(BaseModel): - """User information for registration.""" - name: str age: int - mock_redis = AsyncMock() - mock_redis.set = AsyncMock() - mock_redis.get = AsyncMock( - return_value=b'{"action": "accept", "content": {"name": "Alice", "age": 30}}' - ) - mock_redis.delete = AsyncMock() + mcp = FastMCP("elicit-pydantic-test") - mock_docket = MagicMock() - mock_docket.redis = MagicMock(return_value=AsyncMock()) - mock_docket.redis.return_value.__aenter__ = AsyncMock(return_value=mock_redis) - mock_docket.redis.return_value.__aexit__ = AsyncMock() - mock_docket.key = lambda k: k - - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket - - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session" - mock_session.send_notification = AsyncMock() + 
@mcp.tool(task=True) + async def get_user_info(ctx: Context) -> str: + result = await ctx.elicit("Provide user info", UserInfo) + if isinstance(result, AcceptedElicitation): + assert isinstance(result.data, UserInfo) + return f"{result.data.name} is {result.data.age}" + return "No info" - # Create task-aware context - ctx = Context( - mock_fastmcp, - session=mock_session, - task_id="schema-test-task", - ) + async def handler(message, response_type, params, ctx): + return ElicitResult(action="accept", content={"name": "Alice", "age": 30}) - # Call elicit with a Pydantic model type - with patch("fastmcp.server.tasks.elicitation.asyncio.sleep", AsyncMock()): - result = await ctx.elicit("Please provide user info", UserInfo) - - # Verify the notification includes the full schema - mock_session.send_notification.assert_called_once() - notification = mock_session.send_notification.call_args[0][0] - meta = notification._meta - related_task = meta["modelcontextprotocol.io/related-task"] - schema = related_task["elicitation"]["requestedSchema"] - - # Verify schema structure matches UserInfo - assert schema["type"] == "object" - assert "properties" in schema - assert "name" in schema["properties"] - assert "age" in schema["properties"] - assert schema["properties"]["name"]["type"] == "string" - assert schema["properties"]["age"]["type"] == "integer" - assert "required" in schema - assert set(schema["required"]) == {"name", "age"} - - # Verify the result is properly parsed into the Pydantic model - assert result.action == "accept" - assert isinstance(result, AcceptedElicitation) # Type narrowing - assert isinstance(result.data, UserInfo) - assert result.data.name == "Alice" - assert result.data.age == 30 - - -class TestBackgroundTaskContextWiring: - """Integration tests for Context wiring in Docket workers. 
- - These tests verify that when a background task runs in a Docket worker, - the Context dependency is properly created with task_id and session, - allowing ctx.elicit() to work transparently. - - Per Chris Guidry's review request: "Could we get at least one test showing - the end-to-end of it working, with a background task that's eliciting input? - This will help with what the client-side sees when this happens." - - The key test is `test_context_elicit_full_flow_with_mocked_redis` which shows: - - CLIENT RECEIVES: - notifications/tasks/updated with: - - taskId: the background task ID - - status: "input_required" - - statusMessage: the elicit prompt - - elicitation.requestedSchema: JSON schema for expected input - - CLIENT RESPONDS: - handle_task_input(task_id, session_id, action="accept", content={...}) - - TOOL RECEIVES: - AcceptedElicitation(action="accept", data=) - """ + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("get_user_info", {}, task=True) + await task.wait(timeout=10.0) + result = await task.result() + assert result.data == "Alice is 30" - async def test_context_is_created_with_task_id_in_worker(self): - """Test that Context is created with task_id when running in Docket worker. + async def test_handle_task_input_rejects_when_not_waiting(self): + """handle_task_input returns False when no task is waiting for input.""" + mcp = FastMCP("reject-test") - This verifies the wiring from _CurrentContext that creates a task-aware - Context when get_task_context() returns TaskContextInfo. 
- """ - from unittest.mock import MagicMock, patch + @mcp.tool(task=True) + async def simple_tool() -> str: + return "done" - from fastmcp.server.dependencies import ( - TaskContextInfo, - _current_server, - _CurrentContext, - _task_sessions, - ) + async with Client(mcp) as client: + task = await client.call_tool("simple_tool", {}, task=True) + await task.wait(timeout=5.0) - # Set up mock server - mock_server = MagicMock() - mock_server._docket = MagicMock() - server_token = _current_server.set(MagicMock(return_value=mock_server)) - - # Set up mock session in registry - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session-id" - _task_sessions["test-session-id"] = MagicMock(return_value=mock_session) - - try: - # Mock get_task_context to return TaskContextInfo - task_info = TaskContextInfo( - task_id="test-task-123", - session_id="test-session-id", + # Task already completed β€” no elicitation waiting + success = await handle_task_input( + task_id=task.task_id, + session_id="nonexistent-session", + action="accept", + content={"value": "too late"}, + fastmcp=mcp, ) - with patch( - "fastmcp.server.dependencies.get_task_context", - return_value=task_info, - ): - # Create the dependency and enter it - dep = _CurrentContext() - ctx = await dep.__aenter__() - - # Verify context is task-aware - assert ctx.is_background_task is True - assert ctx.task_id == "test-task-123" - assert ctx.session is mock_session - - # Clean up - await dep.__aexit__(None, None, None) - finally: - _current_server.reset(server_token) - _task_sessions.pop("test-session-id", None) - - async def test_context_falls_back_to_foreground_mode(self): - """Test that Context uses foreground mode when not in worker context. - - When _current_context has a value (normal request handling), - _CurrentContext should return that context instead of creating a new one. 
- """ - from unittest.mock import MagicMock - - from fastmcp.server.context import Context, _current_context - from fastmcp.server.dependencies import _CurrentContext + assert success is False - mcp = MagicMock() - foreground_ctx = Context(mcp) - # Set the foreground context - token = _current_context.set(foreground_ctx) - try: - dep = _CurrentContext() - ctx = await dep.__aenter__() +class TestAccessTokenInBackgroundTasks: + """Tests for access token availability in background tasks (#3095). - # Should return the foreground context - assert ctx is foreground_ctx - assert ctx.is_background_task is False + Integration tests use Client(mcp) with the real memory:// Docket backend. + The token snapshot/restore round-trip flows through actual Redis (fakeredis). - await dep.__aexit__(None, None, None) - finally: - _current_context.reset(token) - - async def test_session_registered_when_task_submitted(self): - """Test that session is registered when a task is submitted to Docket. - - This verifies that submit_to_docket calls register_task_session, - which enables the Context wiring in background workers. - """ - import asyncio - - from fastmcp import FastMCP - from fastmcp.client import Client - from fastmcp.server.dependencies import get_task_session + Note: async tests run in isolated asyncio tasks, so ContextVar changes + are automatically scoped β€” no cleanup required. 
+ """ - mcp = FastMCP("test-server") + async def test_token_round_trips_through_background_task(self): + """E2E: token set at submit time is available inside the worker.""" + from mcp.server.auth.middleware.auth_context import auth_context_var + from mcp.server.auth.middleware.bearer_auth import AuthenticatedUser - task_started = asyncio.Event() - session_id_captured = None + mcp = FastMCP("token-roundtrip") @mcp.tool(task=True) - async def capture_session_tool(ctx: Context) -> str: - """Tool that captures the session ID for verification.""" - nonlocal session_id_captured - task_started.set() - # Access session to verify it works - session_id_captured = ctx.session_id - return "done" + async def check_token(ctx: Context) -> str: + token = get_access_token() + if token is None: + return "no-token" + return f"{token.token}|{token.client_id}" + + test_token = AccessToken( + token="roundtrip-jwt", + client_id="test-client", + scopes=["read"], + claims={"sub": "user-1"}, + ) + auth_context_var.set(AuthenticatedUser(test_token)) async with Client(mcp) as client: - # Start the task - task = await client.call_tool("capture_session_tool", {}, task=True) - assert task is not None - - # Wait for the task to start - await asyncio.wait_for(task_started.wait(), timeout=5.0) - - # Verify the session was registered - assert session_id_captured is not None - # The session should be retrievable via get_task_session - # (it was registered when the task was submitted) - # Session may be available or None if cleaned up - key is registration happened - _ = get_task_session(session_id_captured) - - # Wait for task to complete - await task.wait(timeout=5.0) + task = await client.call_tool("check_token", {}, task=True) result = await task.result() - assert result.data == "done" + assert result.data == "roundtrip-jwt|test-client" - async def test_context_elicit_works_in_background_task(self): - """E2E test: verify Context is properly wired in background tasks. 
- - This test demonstrates that: - 1. Context.task_id is set correctly in background tasks - 2. Context.is_background_task returns True - 3. Context.session_id is available - - The wiring is what enables ctx.elicit() to work in background tasks. - """ - import asyncio - - from fastmcp import FastMCP - from fastmcp.client import Client - from fastmcp.server.context import Context - - mcp = FastMCP("context-wiring-test") - - # Track what happens in the background task - task_completed = asyncio.Event() - captured_task_id: str | None = None - captured_session_id: str | None = None - captured_is_background: bool | None = None + async def test_no_token_when_unauthenticated(self): + """E2E: background task gets no token when nothing was set.""" + mcp = FastMCP("no-auth") @mcp.tool(task=True) - async def verify_context_tool(ctx: Context) -> str: - """Tool that verifies Context is wired correctly for background tasks.""" - nonlocal captured_task_id, captured_session_id, captured_is_background - - # Capture context properties - this is the key verification - captured_task_id = ctx.task_id - captured_session_id = ctx.session_id - captured_is_background = ctx.is_background_task - - task_completed.set() - return f"task_id={ctx.task_id}, is_background={ctx.is_background_task}" + async def check_token(ctx: Context) -> str: + token = get_access_token() + return "no-token" if token is None else token.token async with Client(mcp) as client: - # Start the background task - task = await client.call_tool("verify_context_tool", {}, task=True) - assert task is not None - assert task.task_id is not None - - # Wait for the task to complete - await asyncio.wait_for(task_completed.wait(), timeout=10.0) - - # Verify Context was properly wired in the background task - assert captured_task_id is not None, "Context.task_id should be set" - assert captured_session_id is not None, "Context.session_id should be set" - assert captured_is_background is True, ( - "Context.is_background_task should be 
True" - ) - - # Wait for task result - await task.wait(timeout=10.0) + task = await client.call_tool("check_token", {}, task=True) result = await task.result() - assert "is_background=True" in result.data - - async def test_context_elicit_full_flow_with_mocked_redis(self): - """E2E test with mocked Redis to show complete elicitation flow. - - This test demonstrates what the client sees during background task - elicitation, with a mocked Redis layer to avoid requiring real Redis. - - Flow: - 1. Tool calls ctx.elicit() in background task - 2. Elicitation stores request in Redis, sends input_required notification - 3. Simulated client sends response via handle_task_input() - 4. Tool receives response and completes - - This is the key test that fulfills Chris Guidry's request for an - "end-to-end test showing a background task that's eliciting input" - and demonstrates "what the client-side sees when this happens." - """ - import asyncio - from unittest.mock import AsyncMock, MagicMock - - from fastmcp.server.context import Context - from fastmcp.server.tasks.elicitation import handle_task_input - - # Shared Redis storage that both elicit and handle_task_input will use - redis_storage: dict[str, bytes] = {} + assert result.data == "no-token" - # Create a mock Redis that uses our shared storage - class MockRedis: - async def set( - self, key: str, value: str | bytes, ex: int | None = None - ) -> None: - redis_storage[key] = value.encode() if isinstance(value, str) else value + async def test_expired_token_returns_none(self): + """get_access_token() returns None when task token has expired.""" + from datetime import datetime, timezone - async def get(self, key: str) -> bytes | None: - return redis_storage.get(key) + from fastmcp.server.dependencies import _task_access_token - async def delete(self, *keys: str) -> None: - for key in keys: - redis_storage.pop(key, None) - - mock_redis = MockRedis() - - # Create mock context manager for redis() - class MockRedisContext: - 
async def __aenter__(self): - return mock_redis - - async def __aexit__(self, *args): - pass - - mock_docket = MagicMock() - mock_docket.redis = lambda: MockRedisContext() - mock_docket.key = lambda k: k + expired = AccessToken( + token="expired-jwt", + client_id="test-client", + scopes=["read"], + expires_at=int(datetime.now(timezone.utc).timestamp()) - 3600, + ) + _task_access_token.set(expired) + assert get_access_token() is None - mock_fastmcp = MagicMock() - mock_fastmcp._docket = mock_docket + async def test_valid_token_with_future_expiry(self): + """get_access_token() returns token when expiry is in the future.""" + from datetime import datetime, timezone - mock_session = MagicMock() - mock_session._fastmcp_state_prefix = "test-session-123" - mock_session.send_notification = AsyncMock() + from fastmcp.server.dependencies import _task_access_token - # Create task-aware context (as would be created in background worker) - ctx = Context( - mock_fastmcp, - session=mock_session, - task_id="test-task-456", + valid = AccessToken( + token="valid-jwt", + client_id="test-client", + scopes=["read"], + expires_at=int(datetime.now(timezone.utc).timestamp()) + 3600, ) - - # Verify context is properly configured for background task - assert ctx.is_background_task is True - assert ctx.task_id == "test-task-456" - - # Start elicit in a background task (simulating the Docket worker) - async def run_elicit(): - return await ctx.elicit("What is your name?", str) - - elicit_task = asyncio.create_task(run_elicit()) - - # Wait for elicit to store request and start polling - # The elicit_for_task function stores the request and sends notification - await asyncio.sleep(0.2) - - # ═══════════════════════════════════════════════════════════════════════ - # CLIENT PERSPECTIVE: What does the client see? - # ═══════════════════════════════════════════════════════════════════════ - - # 1. 
CLIENT RECEIVES: notifications/tasks/updated notification - mock_session.send_notification.assert_called() - notification = mock_session.send_notification.call_args[0][0] - assert notification.method == "notifications/tasks/updated" - - # 2. CLIENT INSPECTS: The notification metadata tells the client: - # - Which task needs input (taskId) - # - What status the task is in (input_required) - # - What message to display (statusMessage) - # - The schema for the expected response (elicitation.requestedSchema) - meta = notification._meta - related_task = meta["modelcontextprotocol.io/related-task"] - - assert related_task["taskId"] == "test-task-456" - assert related_task["status"] == "input_required" - assert related_task["statusMessage"] == "What is your name?" - assert "elicitation" in related_task - assert related_task["elicitation"]["message"] == "What is your name?" - assert "requestedSchema" in related_task["elicitation"] - - # 3. CLIENT RESPONDS: Send input via handle_task_input - # This is what a real client would do when it receives input_required - success = await handle_task_input( - task_id="test-task-456", - session_id="test-session-123", - action="accept", - content={"value": "Alice"}, - fastmcp=mock_fastmcp, + _task_access_token.set(valid) + result = get_access_token() + assert result is not None + assert result.token == "valid-jwt" + + async def test_token_without_expiry_always_valid(self): + """get_access_token() returns token when no expires_at is set.""" + from fastmcp.server.dependencies import _task_access_token + + no_expiry = AccessToken( + token="eternal-jwt", + client_id="test-client", + scopes=["read"], ) - assert success is True, "Client should successfully send input" - - # ═══════════════════════════════════════════════════════════════════════ - # TOOL PERSPECTIVE: What does the tool receive? 
- # ═══════════════════════════════════════════════════════════════════════ - - # Wait for elicit to receive the response and return - result = await asyncio.wait_for(elicit_task, timeout=5.0) - - # Verify the result contains what the client sent - # AcceptedElicitation has 'action' and 'data' attributes - assert result.action == "accept" - assert result.data == "Alice" # The value from content["value"] - - async def test_context_elicit_with_real_docket_memory_backend(self): - """E2E test using Docket's real memory:// backend. - - This test uses the real Docket memory backend instead of mocking Redis, - as suggested by Chris Guidry during code review. The memory:// backend - provides a fully functional in-memory Redis-like store that Docket uses - automatically when running tests. - - Flow: - 1. Create FastMCP server with task-enabled tool that calls ctx.elicit() - 2. Start the task via Client (which initializes Docket with memory://) - 3. Background task blocks waiting for client input - 4. Simulate client sending input via handle_task_input() - 5. Task resumes and completes with the elicited value - - This demonstrates the complete elicitation flow with real infrastructure. 
- """ - import asyncio - - from fastmcp import FastMCP - from fastmcp.client import Client - from fastmcp.server.context import Context - from fastmcp.server.tasks.elicitation import handle_task_input - - mcp = FastMCP("elicit-memory-test") + _task_access_token.set(no_expiry) + result = get_access_token() + assert result is not None + assert result.token == "eternal-jwt" - # Track task state using mutable container (avoids nonlocal) - elicit_started = asyncio.Event() - captured: dict[str, str | None] = {"task_id": None, "session_id": None} - @mcp.tool(task=True) - async def ask_for_name(ctx: Context) -> str: - """Tool that elicits user's name via background task.""" - # Capture IDs for handle_task_input call - captured["task_id"] = ctx.task_id - captured["session_id"] = ctx.session_id - elicit_started.set() - - # This will block until client sends input - result = await ctx.elicit("What is your name?", str) - - if isinstance(result, AcceptedElicitation): - return f"Hello, {result.data}!" - else: - return "Elicitation was declined or cancelled" +class TestLifespanContextInBackgroundTasks: + """Tests for lifespan_context availability in background tasks (#3095).""" - async with Client(mcp) as client: - # Start the background task - task = await client.call_tool("ask_for_name", {}, task=True) - assert task is not None - assert task.task_id is not None - - # Wait for task to reach elicit() call - await asyncio.wait_for(elicit_started.wait(), timeout=5.0) - - # Poll until handle_task_input succeeds - # We need to wait for elicit_for_task to store the "waiting" status in Redis - # before we can send input. Using fixed-interval polling (not exponential - # backoff) because we're waiting for state, not recovering from errors. 
- assert captured["task_id"] is not None - assert captured["session_id"] is not None - - max_attempts = 40 - poll_interval_seconds = 0.05 # 50ms - fast for tests, 2s max total - success = False - for _ in range(max_attempts): - success = await handle_task_input( - task_id=captured["task_id"], - session_id=captured["session_id"], - action="accept", - content={"value": "Bob"}, - fastmcp=mcp, - ) - if success: - break - await asyncio.sleep(poll_interval_seconds) - - assert success is True, ( - f"handle_task_input should succeed within {max_attempts * poll_interval_seconds}s" - ) + def test_lifespan_context_falls_back_to_server_result(self): + """lifespan_context reads from server when request_context is None.""" + mcp = FastMCP("test") + mcp._lifespan_result = {"db": "mock-db-connection", "cache": "mock-cache"} - # Wait for task to complete - await task.wait(timeout=10.0) - result = await task.result() + ctx = Context(mcp, task_id="test-task") + assert ctx.request_context is None + assert ctx.lifespan_context == { + "db": "mock-db-connection", + "cache": "mock-cache", + } - # Verify the tool received the elicited value and returned correctly - assert result.data == "Hello, Bob!" + def test_lifespan_context_returns_empty_dict_when_no_lifespan(self): + """lifespan_context returns {} when no lifespan is configured.""" + mcp = FastMCP("test") + ctx = Context(mcp, task_id="test-task") + assert ctx.request_context is None + assert ctx.lifespan_context == {} diff --git a/tests/server/tasks/test_notifications.py b/tests/server/tasks/test_notifications.py new file mode 100644 index 0000000000..f0e144fd41 --- /dev/null +++ b/tests/server/tasks/test_notifications.py @@ -0,0 +1,148 @@ +"""Tests for distributed notification queue (SEP-1686). + +Integration tests verify that the notification queue works end-to-end +using Client(mcp) with the real memory:// Docket backend. +No mocking of Redis, sessions, or Docket internals. 
+""" + +import asyncio + +import mcp.types as mcp_types + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.client.elicitation import ElicitResult +from fastmcp.client.messages import MessageHandler +from fastmcp.server.context import Context +from fastmcp.server.elicitation import AcceptedElicitation +from fastmcp.server.tasks.notifications import ( + get_subscriber_count, +) + + +class NotificationCaptureHandler(MessageHandler): + """Capture server notifications for test assertions.""" + + def __init__(self) -> None: + super().__init__() + self.notifications: list[mcp_types.ServerNotification] = [] + + async def on_notification(self, message: mcp_types.ServerNotification) -> None: + self.notifications.append(message) + + def for_method(self, method: str) -> list[mcp_types.ServerNotification]: + return [ + notification + for notification in self.notifications + if notification.root.method == method + ] + + +class TestNotificationIntegration: + """Integration tests for the notification queue using real Docket memory backend. + + The elicitation flow validates the full notification pipeline: + 1. Tool calls ctx.elicit() -> stores request in Redis -> pushes notification + 2. Subscriber picks up notification -> sends MCP notification to client + 3. Subscriber relays elicitation/create to client -> handler responds + 4. Relay pushes response to Redis -> BLPOP wakes tool + """ + + async def test_notification_delivered_during_elicitation(self): + """Full E2E: notification queue delivers input_required metadata to client. + + The elicitation relay handles the response via the client's + elicitation_handler. We verify both the notification metadata + structure and the end-to-end elicitation flow. 
+ """ + mcp = FastMCP("notification-test") + notification_handler = NotificationCaptureHandler() + + @mcp.tool(task=True) + async def elicit_tool(ctx: Context) -> str: + result = await ctx.elicit("Enter value", str) + if isinstance(result, AcceptedElicitation): + return f"got: {result.data}" + return "no value" + + async def elicitation_handler(message, response_type, params, ctx): + return ElicitResult(action="accept", content={"value": "hello"}) + + async with Client( + mcp, + message_handler=notification_handler, + elicitation_handler=elicitation_handler, + ) as client: + task = await client.call_tool("elicit_tool", {}, task=True) + + await task.wait(timeout=10.0) + result = await task.result() + assert result.data == "got: hello" + + # Verify the input_required notification was delivered with metadata + notification: mcp_types.ServerNotification | None = None + candidates = notification_handler.for_method("notifications/tasks/status") + for candidate in reversed(candidates): + candidate_meta = getattr(candidate.root, "_meta", None) + related_task = ( + candidate_meta.get("io.modelcontextprotocol/related-task") + if isinstance(candidate_meta, dict) + else None + ) + if ( + isinstance(related_task, dict) + and related_task.get("status") == "input_required" + ): + notification = candidate + break + + assert notification is not None, "expected notifications/tasks/status" + task_meta = getattr(notification.root, "_meta", None) + assert isinstance(task_meta, dict) + + related_task = task_meta.get("io.modelcontextprotocol/related-task") + assert isinstance(related_task, dict) + assert related_task.get("taskId") == task.task_id + assert related_task.get("status") == "input_required" + + elicitation = related_task.get("elicitation") + assert isinstance(elicitation, dict) + assert elicitation.get("message") == "Enter value" + assert isinstance(elicitation.get("requestId"), str) + assert isinstance(elicitation.get("requestedSchema"), dict) + + async def 
test_subscriber_started_and_cleaned_up(self): + """Subscriber starts during background task and stops when client disconnects.""" + mcp = FastMCP("subscriber-test") + tool_started = asyncio.Event() + tool_continue = asyncio.Event() + + @mcp.tool(task=True) + async def lifecycle_tool(ctx: Context) -> str: + tool_started.set() + await asyncio.wait_for(tool_continue.wait(), timeout=10.0) + return "done" + + count_before = get_subscriber_count() + + async with Client(mcp) as client: + task = await client.call_tool("lifecycle_tool", {}, task=True) + await asyncio.wait_for(tool_started.wait(), timeout=5.0) + + # While a background task is running, subscriber should be active + count_during = get_subscriber_count() + assert count_during > count_before + + # Let the tool complete + tool_continue.set() + await task.wait(timeout=5.0) + result = await task.result() + assert result.data == "done" + + # After client disconnects, subscriber should be cleaned up + # Allow brief time for async cleanup + for _ in range(20): + if get_subscriber_count() == count_before: + break + await asyncio.sleep(0.05) + assert get_subscriber_count() == count_before diff --git a/tests/server/tasks/test_task_elicitation_relay.py b/tests/server/tasks/test_task_elicitation_relay.py new file mode 100644 index 0000000000..42362edd26 --- /dev/null +++ b/tests/server/tasks/test_task_elicitation_relay.py @@ -0,0 +1,191 @@ +"""Tests for background task elicitation relay (notifications.py). + +The relay bridges distributed background tasks to clients via the standard +MCP elicitation/create protocol. When a worker calls ctx.elicit(), the +notification subscriber detects the input_required notification and sends +an elicitation/create request to the client session. The client's +elicitation_handler fires, and the relay pushes the response to Redis +for the blocked worker. + +These tests use Client(mcp) with the real memory:// Docket backend. 
+""" + +import asyncio +from dataclasses import dataclass + +from pydantic import BaseModel + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.client.elicitation import ElicitResult +from fastmcp.server.context import Context +from fastmcp.server.elicitation import ( + AcceptedElicitation, + CancelledElicitation, + DeclinedElicitation, +) + + +class TestElicitationRelay: + """E2E tests for elicitation flowing through the standard MCP protocol.""" + + async def test_accept_via_elicitation_handler(self): + """Tool elicits, client handler accepts, tool gets the value.""" + mcp = FastMCP("relay-accept") + + @mcp.tool(task=True) + async def ask_name(ctx: Context) -> str: + result = await ctx.elicit("What is your name?", str) + if isinstance(result, AcceptedElicitation): + return f"Hello, {result.data}!" + return "No name" + + async def handler(message, response_type, params, ctx): + assert message == "What is your name?" + return ElicitResult(action="accept", content={"value": "Alice"}) + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("ask_name", {}, task=True) + result = await task.result() + assert result.data == "Hello, Alice!" 
+ + async def test_decline_via_elicitation_handler(self): + """Tool elicits, client handler declines, tool gets DeclinedElicitation.""" + mcp = FastMCP("relay-decline") + + @mcp.tool(task=True) + async def optional_input(ctx: Context) -> str: + result = await ctx.elicit("Provide a name?", str) + if isinstance(result, DeclinedElicitation): + return "User declined" + if isinstance(result, AcceptedElicitation): + return f"Got: {result.data}" + return "Cancelled" + + async def handler(message, response_type, params, ctx): + return ElicitResult(action="decline") + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("optional_input", {}, task=True) + result = await task.result() + assert result.data == "User declined" + + async def test_cancel_via_elicitation_handler(self): + """Tool elicits, client handler cancels, tool gets CancelledElicitation.""" + mcp = FastMCP("relay-cancel") + + @mcp.tool(task=True) + async def cancellable(ctx: Context) -> str: + result = await ctx.elicit("Input?", str) + if isinstance(result, CancelledElicitation): + return "Cancelled" + return "Not cancelled" + + async def handler(message, response_type, params, ctx): + return ElicitResult(action="cancel") + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("cancellable", {}, task=True) + result = await task.result() + assert result.data == "Cancelled" + + async def test_dataclass_round_trips_through_relay(self): + """Structured dataclass type round-trips through the relay.""" + mcp = FastMCP("relay-dataclass") + + @dataclass + class UserInfo: + name: str + age: int + + @mcp.tool(task=True) + async def get_user(ctx: Context) -> str: + result = await ctx.elicit("Provide user info", UserInfo) + if isinstance(result, AcceptedElicitation): + assert isinstance(result.data, UserInfo) + return f"{result.data.name} is {result.data.age}" + return "No info" + + async def handler(message, response_type, params, 
ctx): + return ElicitResult(action="accept", content={"name": "Bob", "age": 30}) + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("get_user", {}, task=True) + result = await task.result() + assert result.data == "Bob is 30" + + async def test_pydantic_model_round_trips_through_relay(self): + """Structured Pydantic model round-trips through the relay.""" + mcp = FastMCP("relay-pydantic") + + class Config(BaseModel): + host: str + port: int + + @mcp.tool(task=True) + async def get_config(ctx: Context) -> str: + result = await ctx.elicit("Server config?", Config) + if isinstance(result, AcceptedElicitation): + assert isinstance(result.data, Config) + return f"{result.data.host}:{result.data.port}" + return "No config" + + async def handler(message, response_type, params, ctx): + return ElicitResult( + action="accept", content={"host": "localhost", "port": 8080} + ) + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("get_config", {}, task=True) + result = await task.result() + assert result.data == "localhost:8080" + + async def test_multiple_sequential_elicitations(self): + """Tool calls ctx.elicit() twice, both go through the relay.""" + mcp = FastMCP("relay-multi") + + @mcp.tool(task=True) + async def two_questions(ctx: Context) -> str: + r1 = await ctx.elicit("First name?", str) + r2 = await ctx.elicit("Last name?", str) + if isinstance(r1, AcceptedElicitation) and isinstance( + r2, AcceptedElicitation + ): + return f"{r1.data} {r2.data}" + return "Incomplete" + + call_count = 0 + + async def handler(message, response_type, params, ctx): + nonlocal call_count + call_count += 1 + if call_count == 1: + assert message == "First name?" + return ElicitResult(action="accept", content={"value": "Jane"}) + else: + assert message == "Last name?" 
+ return ElicitResult(action="accept", content={"value": "Doe"}) + + async with Client(mcp, elicitation_handler=handler) as client: + task = await client.call_tool("two_questions", {}, task=True) + result = await task.result() + assert result.data == "Jane Doe" + assert call_count == 2 + + async def test_no_elicitation_handler_returns_cancel(self): + """Without an elicitation_handler, the relay fails and task gets cancel.""" + mcp = FastMCP("relay-no-handler") + + @mcp.tool(task=True) + async def needs_input(ctx: Context) -> str: + result = await ctx.elicit("Input?", str) + if isinstance(result, CancelledElicitation): + return "Cancelled as expected" + if isinstance(result, AcceptedElicitation): + return f"Got: {result.data}" + return "Other" + + async with Client(mcp) as client: + task = await client.call_tool("needs_input", {}, task=True) + result = await asyncio.wait_for(task.result(), timeout=15.0) + assert result.data == "Cancelled as expected" diff --git a/tests/server/tasks/test_task_metadata.py b/tests/server/tasks/test_task_metadata.py index c603ff6a67..32ce2b8498 100644 --- a/tests/server/tasks/test_task_metadata.py +++ b/tests/server/tasks/test_task_metadata.py @@ -2,7 +2,7 @@ Tests for SEP-1686 related-task metadata in protocol responses. Per the spec, all task-related responses MUST include -modelcontextprotocol.io/related-task in _meta. +io.modelcontextprotocol/related-task in _meta. 
""" import pytest @@ -24,7 +24,7 @@ async def test_tool(value: int) -> int: async def test_tasks_get_includes_related_task_metadata(metadata_server: FastMCP): - """tasks/get response includes modelcontextprotocol.io/related-task in _meta.""" + """tasks/get response includes io.modelcontextprotocol/related-task in _meta.""" async with Client(metadata_server) as client: # Submit a task task = await client.call_tool("test_tool", {"value": 5}, task=True) @@ -40,7 +40,7 @@ async def test_tasks_get_includes_related_task_metadata(metadata_server: FastMCP async def test_tasks_result_includes_related_task_metadata(metadata_server: FastMCP): - """tasks/result response includes modelcontextprotocol.io/related-task in _meta.""" + """tasks/result response includes io.modelcontextprotocol/related-task in _meta.""" async with Client(metadata_server) as client: # Submit and complete a task task = await client.call_tool("test_tool", {"value": 7}, task=True) @@ -53,7 +53,7 @@ async def test_tasks_result_includes_related_task_metadata(metadata_server: Fast async def test_tasks_list_includes_related_task_metadata(metadata_server: FastMCP): - """tasks/list response includes modelcontextprotocol.io/related-task in _meta.""" + """tasks/list response includes io.modelcontextprotocol/related-task in _meta.""" async with Client(metadata_server) as client: # List tasks via client (which uses protocol properly) result = await client.list_tasks() diff --git a/tests/server/tasks/test_task_protocol.py b/tests/server/tasks/test_task_protocol.py index 1d1daa02c1..9c6a88d22e 100644 --- a/tests/server/tasks/test_task_protocol.py +++ b/tests/server/tasks/test_task_protocol.py @@ -48,7 +48,7 @@ async def test_task_metadata_includes_task_id_and_ttl(task_enabled_server): async def test_task_notification_sent_after_submission(task_enabled_server): - """Server sends notifications/tasks/created after task submission.""" + """Server sends an initial task status notification after submission.""" 
@task_enabled_server.tool(task=True) async def background_tool(message: str) -> str: diff --git a/tests/server/test_context.py b/tests/server/test_context.py index 373286d8a8..921257c897 100644 --- a/tests/server/test_context.py +++ b/tests/server/test_context.py @@ -242,6 +242,97 @@ async def store_and_read(value: str, ctx: Context) -> dict: assert data3["session_id"] != session_id_1 # Different session +class TestContextStateSerializable: + """Tests for the serializable parameter on set_state.""" + + async def test_set_state_serializable_false_stores_arbitrary_objects(self): + """Non-serializable objects can be stored with serializable=False.""" + server = FastMCP("test") + mock_session = MagicMock() + + class MyClient: + def __init__(self): + self.connected = True + + client = MyClient() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("client", client, serializable=False) + result = await context.get_state("client") + assert result is client + assert result.connected is True + + async def test_set_state_serializable_false_does_not_persist_across_requests(self): + """Non-serializable state is request-scoped and gone in a new context.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("key", object(), serializable=False) + assert await context.get_state("key") is not None + + async with Context(fastmcp=server, session=mock_session) as context: + assert await context.get_state("key") is None + + async def test_set_state_serializable_true_rejects_non_serializable(self): + """Default set_state raises TypeError for non-serializable values.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + with pytest.raises(TypeError, match="serializable=False"): + await context.set_state("key", object()) + + async def 
test_set_state_serializable_false_shadows_session_state(self): + """Request-scoped state shadows session-scoped state for the same key.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("key", "session-value") + assert await context.get_state("key") == "session-value" + + await context.set_state("key", "request-value", serializable=False) + assert await context.get_state("key") == "request-value" + + async def test_delete_state_removes_from_both_stores(self): + """delete_state clears both request-scoped and session-scoped values.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("key", "session-value") + await context.set_state("key", "request-value", serializable=False) + assert await context.get_state("key") == "request-value" + + await context.delete_state("key") + assert await context.get_state("key") is None + + async def test_serializable_state_still_persists_across_requests(self): + """Serializable state (default) still persists across requests.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("key", "persistent") + + async with Context(fastmcp=server, session=mock_session) as context: + assert await context.get_state("key") == "persistent" + + async def test_serializable_write_clears_request_scoped_shadow(self): + """Writing serializable state clears any request-scoped shadow for the same key.""" + server = FastMCP("test") + mock_session = MagicMock() + + async with Context(fastmcp=server, session=mock_session) as context: + await context.set_state("key", "request-value", serializable=False) + assert await context.get_state("key") == "request-value" + + # Serializable write should clear the shadow + await context.set_state("key", 
"session-value") + assert await context.get_state("key") == "session-value" + + class TestContextMeta: """Test suite for Context meta functionality.""" diff --git a/tests/server/test_dependencies.py b/tests/server/test_dependencies.py index 7d4119c8e4..106babecce 100644 --- a/tests/server/test_dependencies.py +++ b/tests/server/test_dependencies.py @@ -1045,3 +1045,115 @@ def my_func(name: str, db: str = Depends(get_db)) -> str: db_dep = deps["db"] assert isinstance(db_dep, _Depends) assert db_dep.dependency is get_db + + +class TestAuthDependencies: + """Tests for authentication dependencies (CurrentAccessToken, TokenClaim).""" + + def test_current_access_token_is_importable(self): + """Test that CurrentAccessToken can be imported.""" + from fastmcp.server.dependencies import CurrentAccessToken + + assert CurrentAccessToken is not None + + def test_token_claim_is_importable(self): + """Test that TokenClaim can be imported.""" + from fastmcp.server.dependencies import TokenClaim + + assert TokenClaim is not None + + def test_current_access_token_is_dependency(self): + """Test that CurrentAccessToken is a Dependency instance.""" + # Import the Dependency class the same way the code does + # (docket if available, vendored otherwise) + try: + from docket.dependencies import Dependency + except ImportError: + from fastmcp._vendor.docket_di import Dependency + + from fastmcp.server.dependencies import _CurrentAccessToken + + dep = _CurrentAccessToken() + assert isinstance(dep, Dependency) + + def test_token_claim_creates_dependency(self): + """Test that TokenClaim creates a Dependency instance.""" + # Import the Dependency class the same way the code does + try: + from docket.dependencies import Dependency + except ImportError: + from fastmcp._vendor.docket_di import Dependency + + from fastmcp.server.dependencies import TokenClaim, _TokenClaim + + dep = TokenClaim("oid") + assert isinstance(dep, _TokenClaim) + assert isinstance(dep, Dependency) + assert dep.claim_name 
== "oid" + + async def test_current_access_token_raises_without_token(self): + """Test that CurrentAccessToken raises when no token is available.""" + from fastmcp.server.dependencies import _CurrentAccessToken + + dep = _CurrentAccessToken() + with pytest.raises(RuntimeError, match="No access token found"): + await dep.__aenter__() + + async def test_token_claim_raises_without_token(self): + """Test that TokenClaim raises when no token is available.""" + from fastmcp.server.dependencies import _TokenClaim + + dep = _TokenClaim("oid") + with pytest.raises(RuntimeError, match="No access token available"): + await dep.__aenter__() + + async def test_current_access_token_excluded_from_tool_schema(self, mcp: FastMCP): + """Test that CurrentAccessToken dependency is excluded from tool schema.""" + import mcp.types as mcp_types + + from fastmcp.server.auth import AccessToken + from fastmcp.server.dependencies import CurrentAccessToken + + @mcp.tool() + async def tool_with_token( + name: str, + token: AccessToken = CurrentAccessToken(), + ) -> str: + return name + + result = await mcp._list_tools_mcp(mcp_types.ListToolsRequest()) + tool = next(t for t in result.tools if t.name == "tool_with_token") + + assert "name" in tool.inputSchema["properties"] + assert "token" not in tool.inputSchema["properties"] + + async def test_token_claim_excluded_from_tool_schema(self, mcp: FastMCP): + """Test that TokenClaim dependency is excluded from tool schema.""" + import mcp.types as mcp_types + + from fastmcp.server.dependencies import TokenClaim + + @mcp.tool() + async def tool_with_claim( + name: str, + user_id: str = TokenClaim("oid"), + ) -> str: + return name + + result = await mcp._list_tools_mcp(mcp_types.ListToolsRequest()) + tool = next(t for t in result.tools if t.name == "tool_with_claim") + + assert "name" in tool.inputSchema["properties"] + assert "user_id" not in tool.inputSchema["properties"] + + def test_current_access_token_exported_from_all(self): + """Test that 
CurrentAccessToken is exported from __all__.""" + from fastmcp.server import dependencies + + assert "CurrentAccessToken" in dependencies.__all__ + + def test_token_claim_exported_from_all(self): + """Test that TokenClaim is exported from __all__.""" + from fastmcp.server import dependencies + + assert "TokenClaim" in dependencies.__all__ diff --git a/tests/server/test_pagination.py b/tests/server/test_pagination.py index c1387fb464..f90a763c0b 100644 --- a/tests/server/test_pagination.py +++ b/tests/server/test_pagination.py @@ -2,6 +2,9 @@ from __future__ import annotations +from unittest.mock import patch + +import mcp.types import pytest from mcp.shared.exceptions import McpError @@ -241,3 +244,193 @@ def test_negative_page_size_raises(self) -> None: ValueError, match="list_page_size must be a positive integer" ): FastMCP(list_page_size=-1) + + +class TestPaginationCycleDetection: + """Tests that auto-pagination terminates when the server returns cycling cursors.""" + + async def test_tools_constant_cursor_terminates(self) -> None: + """list_tools should stop if the server always returns the same cursor.""" + server = FastMCP() + + @server.tool + def my_tool() -> str: + return "ok" + + async with Client(server) as client: + original = client.list_tools_mcp + + async def returning_constant_cursor( + *, + cursor: str | None = None, + ) -> mcp.types.ListToolsResult: + result = await original(cursor=cursor) + result.nextCursor = "stuck" + return result + + with patch.object( + client, "list_tools_mcp", side_effect=returning_constant_cursor + ): + tools = await client.list_tools() + + # Should get tools from first page + one duplicate (the retry before + # detecting the cycle), then stop. 
+ assert len(tools) == 2 + assert all(t.name == "my_tool" for t in tools) + + async def test_prompts_constant_cursor_terminates(self) -> None: + """list_prompts should stop if the server always returns the same cursor.""" + server = FastMCP() + + @server.prompt + def my_prompt() -> str: + return "text" + + async with Client(server) as client: + original = client.list_prompts_mcp + + async def returning_constant_cursor( + *, + cursor: str | None = None, + ) -> mcp.types.ListPromptsResult: + result = await original(cursor=cursor) + result.nextCursor = "stuck" + return result + + with patch.object( + client, "list_prompts_mcp", side_effect=returning_constant_cursor + ): + prompts = await client.list_prompts() + + assert len(prompts) == 2 + assert all(p.name == "my_prompt" for p in prompts) + + async def test_resources_constant_cursor_terminates(self) -> None: + """list_resources should stop if the server always returns the same cursor.""" + server = FastMCP() + + @server.resource("test://r") + def my_resource() -> str: + return "data" + + async with Client(server) as client: + original = client.list_resources_mcp + + async def returning_constant_cursor( + *, + cursor: str | None = None, + ) -> mcp.types.ListResourcesResult: + result = await original(cursor=cursor) + result.nextCursor = "stuck" + return result + + with patch.object( + client, "list_resources_mcp", side_effect=returning_constant_cursor + ): + resources = await client.list_resources() + + assert len(resources) == 2 + assert all(r.name == "my_resource" for r in resources) + + async def test_resource_templates_constant_cursor_terminates(self) -> None: + """list_resource_templates should stop if the server always returns the same cursor.""" + server = FastMCP() + + @server.resource("test://items/{item_id}") + def my_template(item_id: str) -> str: + return item_id + + async with Client(server) as client: + original = client.list_resource_templates_mcp + + async def returning_constant_cursor( + *, + cursor: 
str | None = None, + ) -> mcp.types.ListResourceTemplatesResult: + result = await original(cursor=cursor) + result.nextCursor = "stuck" + return result + + with patch.object( + client, + "list_resource_templates_mcp", + side_effect=returning_constant_cursor, + ): + templates = await client.list_resource_templates() + + assert len(templates) == 2 + + async def test_cycling_cursors_terminates(self) -> None: + """list_tools should stop if the server cycles through a set of cursors.""" + server = FastMCP() + + @server.tool + def my_tool() -> str: + return "ok" + + async with Client(server) as client: + call_count = 0 + original = client.list_tools_mcp + + async def returning_cycling_cursor( + *, + cursor: str | None = None, + ) -> mcp.types.ListToolsResult: + nonlocal call_count + result = await original(cursor=cursor) + # Cycle through A -> B -> C -> A + cursors = ["A", "B", "C"] + result.nextCursor = cursors[call_count % 3] + call_count += 1 + return result + + with patch.object( + client, "list_tools_mcp", side_effect=returning_cycling_cursor + ): + tools = await client.list_tools() + + # A, B, C seen, then A is a duplicate → 4 calls total + assert call_count == 4 + assert len(tools) == 4 + + async def test_empty_string_cursor_terminates(self) -> None: + """list_tools should stop if the server returns an empty string cursor.""" + server = FastMCP() + + @server.tool + def my_tool() -> str: + return "ok" + + async with Client(server) as client: + original = client.list_tools_mcp + + async def returning_empty_cursor( + *, + cursor: str | None = None, + ) -> mcp.types.ListToolsResult: + result = await original(cursor=cursor) + result.nextCursor = "" + return result + + with patch.object( + client, "list_tools_mcp", side_effect=returning_empty_cursor + ): + tools = await client.list_tools() + + assert len(tools) == 1 + assert tools[0].name == "my_tool" + + async def test_normal_pagination_unaffected(self) -> None: + """Cycle detection should not interfere with normal 
pagination.""" + server = FastMCP(list_page_size=10) + + for i in range(25): + + @server.tool(name=f"tool_{i}") + def make_tool() -> str: + return "ok" + + async with Client(server) as client: + tools = await client.list_tools() + assert len(tools) == 25 + assert len({t.name for t in tools}) == 25 diff --git a/tests/server/test_server.py b/tests/server/test_server.py index 12166715c5..e3a7f51250 100644 --- a/tests/server/test_server.py +++ b/tests/server/test_server.py @@ -1,4 +1,5 @@ import os +import warnings from pathlib import Path from tempfile import TemporaryDirectory from textwrap import dedent @@ -7,6 +8,7 @@ from mcp.types import TextContent, TextResourceContents from fastmcp import Client, FastMCP +from fastmcp.server.providers import LocalProvider from fastmcp.tools import FunctionTool from fastmcp.tools.tool import Tool from fastmcp.utilities.tests import temporary_settings @@ -116,6 +118,80 @@ def local_tool() -> str: assert any(t.name == "local_tool" for t in tools) +class TestLocalProviderProperty: + """Test the public local_provider property.""" + + async def test_local_provider_returns_local_provider(self): + mcp = FastMCP() + assert isinstance(mcp.local_provider, LocalProvider) + assert mcp.local_provider is mcp._local_provider + + async def test_remove_tool_via_local_provider(self): + mcp = FastMCP() + + @mcp.tool + def my_tool() -> str: + return "result" + + assert await mcp.local_provider.get_tool("my_tool") is not None + mcp.local_provider.remove_tool("my_tool") + tools = await mcp.list_tools() + assert not any(t.name == "my_tool" for t in tools) + + async def test_remove_resource_via_local_provider(self): + mcp = FastMCP() + + @mcp.resource("resource://test") + def my_resource() -> str: + return "data" + + mcp.local_provider.remove_resource("resource://test") + resources = await mcp.list_resources() + assert not any(r.uri == "resource://test" for r in resources) + + async def test_remove_prompt_via_local_provider(self): + mcp = FastMCP() + + 
@mcp.prompt + def my_prompt() -> str: + return "hello" + + mcp.local_provider.remove_prompt("my_prompt") + prompts = await mcp.list_prompts() + assert not any(p.name == "my_prompt" for p in prompts) + + +class TestRemoveToolDeprecation: + async def test_remove_tool_emits_deprecation_warning(self): + mcp = FastMCP() + + @mcp.tool + def my_tool() -> str: + return "result" + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + mcp.remove_tool("my_tool") + + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert "local_provider" in str(w[0].message) + + async def test_remove_tool_still_works(self): + mcp = FastMCP() + + @mcp.tool + def my_tool() -> str: + return "result" + + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + mcp.remove_tool("my_tool") + + tools = await mcp.list_tools() + assert not any(t.name == "my_tool" for t in tools) + + class TestResourcePrefixMounting: """Test resource prefixing in mounted servers.""" @@ -211,9 +287,9 @@ def dummy_tool() -> str: "test", middleware=(), # Empty tuple tools=(Tool.from_function(dummy_tool),), # Tuple of tools - include_tags={"tag1", "tag2"}, # Set - exclude_tags={"tag3"}, # Set ) + mcp.enable(tags={"tag1", "tag2"}, only=True) + mcp.disable(tags={"tag3"}) assert mcp is not None assert mcp.name == "test" assert isinstance(mcp.middleware, list) # Should be converted to list diff --git a/tests/server/test_session_visibility.py b/tests/server/test_session_visibility.py index ae307dfc68..887f4f3307 100644 --- a/tests/server/test_session_visibility.py +++ b/tests/server/test_session_visibility.py @@ -618,3 +618,151 @@ async def non_activated_session(session_id: str): assert results[f"non_activated_{i}"] is False, ( f"Non-activated session {i} should NOT see premium tool" ) + + +class TestSessionVisibilityResetBug: + """Regression tests for #3034: visibility marks leak via shared component mutation.""" + + async def 
test_disable_then_reset_restores_tools(self): + """After disable + reset within the same session, tools should reappear.""" + from fastmcp import Client + + mcp = FastMCP("test") + + @mcp.tool(tags={"system"}) + def my_tool() -> str: + return "hello" + + @mcp.tool(tags={"env"}) + async def enter_env(ctx: Context) -> str: + await ctx.disable_components(tags={"system"}) + return "entered" + + @mcp.tool(tags={"env"}) + async def exit_env(ctx: Context) -> str: + await ctx.reset_visibility() + return "exited" + + async with Client(mcp) as client: + # Tool visible initially + tools = await client.list_tools() + assert any(t.name == "my_tool" for t in tools) + + # Disable it + await client.call_tool("enter_env", {}) + tools = await client.list_tools() + assert not any(t.name == "my_tool" for t in tools) + + # Reset — tool should come back + await client.call_tool("exit_env", {}) + tools = await client.list_tools() + assert any(t.name == "my_tool" for t in tools), ( + "Tool should be visible again after reset_visibility" + ) + + async def test_disable_reset_loop(self): + """Repeated disable/reset cycles should work every time (the exact bug from #3034).""" + from fastmcp import Client + + mcp = FastMCP("test") + + @mcp.tool(tags={"system"}) + def create_project() -> str: + return "created" + + @mcp.tool(tags={"env"}) + async def enter_env(ctx: Context) -> str: + await ctx.disable_components(tags={"system"}) + return "entered" + + @mcp.tool(tags={"env"}) + async def exit_env(ctx: Context) -> str: + await ctx.reset_visibility() + return "exited" + + async with Client(mcp) as client: + for i in range(3): + # create_project should be visible + tools = await client.list_tools() + assert any(t.name == "create_project" for t in tools), ( + f"Iteration {i}: create_project should be visible before enter_env" + ) + + # Enter env — disables system tools + await client.call_tool("enter_env", {}) + tools = await client.list_tools() + assert not any(t.name == "create_project" for t 
in tools), ( + f"Iteration {i}: create_project should be hidden after enter_env" + ) + + # Exit env — reset + await client.call_tool("exit_env", {}) + + async def test_session_disable_does_not_leak_to_concurrent_session(self): + """Disabling tools in one session must not affect a concurrent session.""" + from fastmcp import Client + + mcp = FastMCP("test") + + @mcp.tool(tags={"system"}) + def shared_tool() -> str: + return "shared" + + @mcp.tool + async def disable_system(ctx: Context) -> str: + await ctx.disable_components(tags={"system"}) + return "disabled" + + session_b_sees_tool = False + ready = anyio.Event() + check_done = anyio.Event() + + async def session_a(): + async with Client(mcp) as client: + await client.call_tool("disable_system", {}) + ready.set() + await check_done.wait() + + async def session_b(): + nonlocal session_b_sees_tool + await ready.wait() + async with Client(mcp) as client: + tools = await client.list_tools() + session_b_sees_tool = any(t.name == "shared_tool" for t in tools) + check_done.set() + + async with anyio.create_task_group() as tg: + tg.start_soon(session_a) + tg.start_soon(session_b) + + assert session_b_sees_tool is True, ( + "Session B should still see shared_tool despite Session A disabling it" + ) + + async def test_session_disable_does_not_leak_to_sequential_session(self): + """Disabling tools in one session must not affect a later session.""" + from fastmcp import Client + + mcp = FastMCP("test") + + @mcp.tool(tags={"system"}) + def shared_tool() -> str: + return "shared" + + @mcp.tool + async def disable_system(ctx: Context) -> str: + await ctx.disable_components(tags={"system"}) + return "disabled" + + # Session A disables the tool (no reset) + async with Client(mcp) as client_a: + await client_a.call_tool("disable_system", {}) + tools = await client_a.list_tools() + assert not any(t.name == "shared_tool" for t in tools) + + # Session B should see it fresh + async with Client(mcp) as client_b: + tools = await 
client_b.list_tools() + assert any(t.name == "shared_tool" for t in tools), ( + "New session should see shared_tool regardless of previous session" + ) diff --git a/tests/server/test_tool_transformation.py b/tests/server/test_tool_transformation.py index 4f9833bdf0..4cf9388229 100644 --- a/tests/server/test_tool_transformation.py +++ b/tests/server/test_tool_transformation.py @@ -1,6 +1,12 @@ +import httpx + from fastmcp import FastMCP +from fastmcp.client import Client from fastmcp.server.transforms import ToolTransform -from fastmcp.tools.tool_transform import ToolTransformConfig +from fastmcp.tools.tool_transform import ( + ArgTransformConfig, + ToolTransformConfig, +) async def test_tool_transformation_via_layer(): @@ -207,3 +213,73 @@ def my_tool() -> str: # Tool should now be visible assert "my_tool" in tool_names + + +async def test_openapi_path_params_not_duplicated_in_description(): + """Path parameter details should live in inputSchema, not the description. + + Regression test for https://github.com/jlowin/fastmcp/issues/3130 — hiding + a path param via ToolTransform left stale references in the description + because the description was generated before transforms ran. The fix is to + keep parameter docs in inputSchema only, where transforms can control them. 
+ """ + spec = { + "openapi": "3.1.0", + "info": {"title": "Test", "version": "0.1.0"}, + "paths": { + "/api/{version}/users/{user_id}": { + "get": { + "operationId": "my_endpoint", + "summary": "My endpoint", + "parameters": [ + { + "name": "version", + "in": "path", + "required": True, + "description": "API version", + "schema": {"type": "string"}, + }, + { + "name": "user_id", + "in": "path", + "required": True, + "description": "The user ID", + "schema": {"type": "string"}, + }, + ], + "responses": {"200": {"description": "OK"}}, + }, + }, + }, + } + + async with httpx.AsyncClient(base_url="http://localhost") as http_client: + mcp = FastMCP.from_openapi(openapi_spec=spec, client=http_client) + + # Hide one of the two path params + mcp.add_transform( + ToolTransform( + { + "my_endpoint": ToolTransformConfig( + arguments={ + "version": ArgTransformConfig(hide=True, default="v1"), + } + ) + } + ) + ) + + async with Client(mcp) as client: + tools = await client.list_tools() + tool = tools[0] + + # Description should be the summary only — no parameter details + assert tool.description == "My endpoint" + + # Hidden param gone from schema, visible param still present + assert "version" not in tool.inputSchema.get("properties", {}) + assert "user_id" in tool.inputSchema["properties"] + assert ( + tool.inputSchema["properties"]["user_id"]["description"] + == "The user ID" + ) diff --git a/tests/server/transforms/test_visibility.py b/tests/server/transforms/test_visibility.py index cf7b9f1b7e..a784af1f47 100644 --- a/tests/server/transforms/test_visibility.py +++ b/tests/server/transforms/test_visibility.py @@ -101,36 +101,39 @@ class TestMarking: def test_disable_marks_as_disabled(self): """Visibility(False, ...) 
marks matching components as disabled.""" tool = Tool(name="foo", parameters={}) - Visibility(False, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is False + marked = Visibility(False, names={"foo"})._mark_component(tool) + assert is_enabled(marked) is False def test_enable_marks_as_enabled(self): """Visibility(True, ...) marks matching components as enabled.""" tool = Tool(name="foo", parameters={}) - Visibility(True, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is True - assert tool.meta is not None - assert tool.meta["fastmcp"]["_internal"]["visibility"] is True + marked = Visibility(True, names={"foo"})._mark_component(tool) + assert is_enabled(marked) is True + assert marked.meta is not None + assert marked.meta["fastmcp"]["_internal"]["visibility"] is True def test_non_matching_unchanged(self): """Non-matching components are not modified.""" tool = Tool(name="bar", parameters={}) - Visibility(False, names={"foo"})._mark_component(tool) + result = Visibility(False, names={"foo"})._mark_component(tool) # No _internal key added - assert tool.meta is None or "_internal" not in tool.meta.get("fastmcp", {}) - assert is_enabled(tool) is True + assert result.meta is None or "_internal" not in result.meta.get("fastmcp", {}) + assert is_enabled(result) is True - def test_mutates_in_place(self): - """Marking mutates the component in place.""" + def test_returns_copy_for_matching(self): + """Marking returns a copy to avoid mutating shared provider objects.""" tool = Tool(name="foo", parameters={}) result = Visibility(False, names={"foo"})._mark_component(tool) - assert result is tool + assert result is not tool + assert is_enabled(result) is False + # Original is untouched + assert is_enabled(tool) is True def test_disable_all(self): """match_all=True disables all components.""" tool = Tool(name="anything", parameters={}) - Visibility(False, match_all=True)._mark_component(tool) - assert is_enabled(tool) is False + marked = 
Visibility(False, match_all=True)._mark_component(tool) + assert is_enabled(marked) is False class TestOverride: @@ -139,20 +142,20 @@ class TestOverride: def test_enable_overrides_disable(self): """An enable after disable results in enabled.""" tool = Tool(name="foo", parameters={}) - Visibility(False, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is False + marked = Visibility(False, names={"foo"})._mark_component(tool) + assert is_enabled(marked) is False - Visibility(True, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is True + marked = Visibility(True, names={"foo"})._mark_component(marked) + assert is_enabled(marked) is True def test_disable_overrides_enable(self): """A disable after enable results in disabled.""" tool = Tool(name="foo", parameters={}) - Visibility(True, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is True + marked = Visibility(True, names={"foo"})._mark_component(tool) + assert is_enabled(marked) is True - Visibility(False, names={"foo"})._mark_component(tool) - assert is_enabled(tool) is False + marked = Visibility(False, names={"foo"})._mark_component(marked) + assert is_enabled(marked) is False class TestHelperFunctions: @@ -169,9 +172,10 @@ def test_filtering_pattern(self): Tool(name="enabled", parameters={}), Tool(name="disabled", parameters={}), ] - Visibility(False, names={"disabled"})._mark_component(tools[1]) + vis = Visibility(False, names={"disabled"}) + marked_tools = [vis._mark_component(t) for t in tools] - visible = [t for t in tools if is_enabled(t)] + visible = [t for t in marked_tools if is_enabled(t)] assert [t.name for t in visible] == ["enabled"] @@ -181,14 +185,14 @@ class TestMetadata: def test_internal_metadata_stripped_by_get_meta(self): """Internal metadata is stripped when calling get_meta().""" tool = Tool(name="foo", parameters={}) - Visibility(True, names={"foo"})._mark_component(tool) + marked = Visibility(True, names={"foo"})._mark_component(tool) # Raw meta 
has _internal - assert tool.meta is not None - assert "_internal" in tool.meta.get("fastmcp", {}) + assert marked.meta is not None + assert "_internal" in marked.meta.get("fastmcp", {}) # get_meta() strips it - output = tool.get_meta() + output = marked.get_meta() assert "_internal" not in output.get("fastmcp", {}) def test_user_metadata_preserved(self): diff --git a/tests/test_apps_prefab.py b/tests/test_apps_prefab.py index 603d1307f2..2a1908eb7d 100644 --- a/tests/test_apps_prefab.py +++ b/tests/test_apps_prefab.py @@ -7,9 +7,9 @@ from __future__ import annotations from mcp.types import TextContent -from prefab_ui import UIResponse from prefab_ui.components import Column, Heading, Text from prefab_ui.components.base import Component +from prefab_ui.response import UIResponse from fastmcp import Client, FastMCP from fastmcp.resources.types import TextResource diff --git a/tests/test_mcp_config.py b/tests/test_mcp_config.py index ee375dd022..0a6c22ab76 100644 --- a/tests/test_mcp_config.py +++ b/tests/test_mcp_config.py @@ -150,6 +150,11 @@ def test_parse_mcpservers_discriminator(): "args": ["hello"], }, "test_server_two": {"command": "echo", "args": ["hello"], "tools": {}}, + "test_server_three": { + "command": "echo", + "args": ["hello"], + "include_tags": ["my_tag"], + }, } mcp_config = MCPConfig.from_dict(config) @@ -157,8 +162,13 @@ def test_parse_mcpservers_discriminator(): test_server: MCPServerTypes = mcp_config.mcpServers["test_server"] assert isinstance(test_server, StdioMCPServer) + # Empty tools dict with no tags is not a meaningful transform test_server_two: MCPServerTypes = mcp_config.mcpServers["test_server_two"] - assert isinstance(test_server_two, TransformingStdioMCPServer) + assert isinstance(test_server_two, StdioMCPServer) + + # include_tags alone triggers transforming type + test_server_three: MCPServerTypes = mcp_config.mcpServers["test_server_three"] + assert isinstance(test_server_three, TransformingStdioMCPServer) canonical_mcp_config = 
CanonicalMCPConfig.from_dict(config) @@ -738,6 +748,48 @@ def subtract(a: int, b: int) -> int: assert "test_2_subtract" in tools_by_name +@pytest.mark.flaky(retries=3) +async def test_single_server_config_include_tags_filtering(tmp_path: Path): + """include_tags should filter tools even with a single server in the config.""" + server_script = inspect.cleandoc(""" + from fastmcp import FastMCP + + mcp = FastMCP() + + @mcp.tool(tags={"keep"}) + def add(a: int, b: int) -> int: + return a + b + + @mcp.tool + def subtract(a: int, b: int) -> int: + return a - b + + if __name__ == '__main__': + mcp.run() + """) + + script_path = tmp_path / "test.py" + script_path.write_text(server_script) + + config = { + "mcpServers": { + "test": { + "command": "python", + "args": [str(script_path)], + "include_tags": ["keep"], + }, + } + } + + client = Client(config) + + async with client: + tools = await client.list_tools() + tool_names = {tool.name for tool in tools} + assert "add" in tool_names + assert "subtract" not in tool_names + + async def test_multi_client_with_elicitation(tmp_path: Path): """ Tests that elicitation is properly forwarded to the ultimate client. 
diff --git a/tests/tools/tool/test_tool.py b/tests/tools/tool/test_tool.py index dd75d74432..e2976b674b 100644 --- a/tests/tools/tool/test_tool.py +++ b/tests/tools/tool/test_tool.py @@ -196,9 +196,8 @@ def create_user(user: UserInput, flag: bool) -> dict: "description": "Create a new user.", "tags": set(), "parameters": { - "additionalProperties": False, - "properties": { - "user": { + "$defs": { + "UserInput": { "properties": { "name": {"type": "string"}, "age": {"type": "integer"}, @@ -206,6 +205,10 @@ def create_user(user: UserInput, flag: bool) -> dict: "required": ["name", "age"], "type": "object", }, + }, + "additionalProperties": False, + "properties": { + "user": {"$ref": "#/$defs/UserInput"}, "flag": {"type": "boolean"}, }, "required": ["user", "flag"], diff --git a/tests/tools/tool_transform/test_schemas.py b/tests/tools/tool_transform/test_schemas.py index 51cb89f765..41aa7bbe8e 100644 --- a/tests/tools/tool_transform/test_schemas.py +++ b/tests/tools/tool_transform/test_schemas.py @@ -346,8 +346,8 @@ def test_arg_transform_examples_in_schema(self, add_tool: Tool): def test_merge_schema_with_defs_precedence(self): """Test _merge_schema_with_precedence merges $defs correctly. - Note: This tests the raw merge behavior before dereferencing. - The final schema output will be dereferenced by compress_schema. + Note: compress_schema no longer dereferences $ref by default. + Used definitions are kept in $defs; unused definitions are pruned. 
""" base_schema = { "type": "object", @@ -374,13 +374,17 @@ def test_merge_schema_with_defs_precedence(self): # SharedType should no longer be present on the schema (unused) assert "SharedType" not in transformed_tool_schema.get("$defs", {}) - # Schema is dereferenced so no $defs in final output + # $ref and $defs are preserved for used definitions assert transformed_tool_schema == snapshot( { "type": "object", "properties": { - "field1": {"type": "string", "description": "base"}, - "field2": {"type": "boolean"}, + "field1": {"$ref": "#/$defs/BaseType"}, + "field2": {"$ref": "#/$defs/OverrideType"}, + }, + "$defs": { + "BaseType": {"type": "string", "description": "base"}, + "OverrideType": {"type": "boolean"}, }, "required": [], "additionalProperties": False, @@ -390,8 +394,8 @@ def test_merge_schema_with_defs_precedence(self): def test_transform_tool_with_complex_defs_pruning(self): """Test that tool transformation properly handles hidden params. - With schema dereferencing, unused types are automatically removed - since $defs is eliminated entirely. + Unused type definitions are pruned from $defs when their + corresponding parameters are hidden. Used types remain as $ref. 
""" class UsedType(BaseModel): @@ -411,18 +415,21 @@ def complex_tool( complex_tool, transform_args={"unused_param": ArgTransform(hide=True)} ) - # Schema is dereferenced - no $defs - assert "$defs" not in transformed_tool.parameters + # UnusedType should be pruned from $defs, but UsedType remains + assert "UnusedType" not in transformed_tool.parameters.get("$defs", {}) assert transformed_tool.parameters == snapshot( { "type": "object", "properties": { - "used_param": { + "used_param": {"$ref": "#/$defs/UsedType"}, + }, + "$defs": { + "UsedType": { "properties": {"value": {"type": "string"}}, "required": ["value"], "type": "object", - } + }, }, "required": ["used_param"], "additionalProperties": False, @@ -430,7 +437,7 @@ def complex_tool( ) def test_transform_with_custom_function_preserves_needed_types(self): - """Test that custom transform functions preserve necessary types inline.""" + """Test that custom transform functions preserve necessary type definitions.""" class InputType(BaseModel): data: str @@ -452,18 +459,19 @@ async def transform_function(renamed_input: InputType): transform_args={"input_data": ArgTransform(name="renamed_input")}, ) - # Schema is dereferenced - types are inlined - assert "$defs" not in transformed.parameters - + # Used type definitions are preserved as $ref/$defs assert transformed.parameters == snapshot( { "type": "object", "properties": { - "renamed_input": { + "renamed_input": {"$ref": "#/$defs/InputType"}, + }, + "$defs": { + "InputType": { "properties": {"data": {"type": "string"}}, "required": ["data"], "type": "object", - } + }, }, "required": ["renamed_input"], "additionalProperties": False, @@ -471,7 +479,7 @@ async def transform_function(renamed_input: InputType): ) def test_chained_transforms_inline_types(self): - """Test that chained transformations produce correct inlined schemas.""" + """Test that chained transformations produce correct schemas with $ref/$defs.""" class TypeA(BaseModel): a: str @@ -492,19 +500,23 @@ 
def base_tool(param_a: TypeA, param_b: TypeB, param_c: TypeC) -> str: transform_args={"param_c": ArgTransform(hide=True, default=TypeC(c=True))}, ) - # Schema is dereferenced - types are inlined - assert "$defs" not in transform1.parameters + # TypeC should be pruned from $defs, TypeA and TypeB remain + assert "TypeC" not in transform1.parameters.get("$defs", {}) assert transform1.parameters == snapshot( { "type": "object", "properties": { - "param_a": { + "param_a": {"$ref": "#/$defs/TypeA"}, + "param_b": {"$ref": "#/$defs/TypeB"}, + }, + "$defs": { + "TypeA": { "properties": {"a": {"type": "string"}}, "required": ["a"], "type": "object", }, - "param_b": { + "TypeB": { "properties": {"b": {"type": "integer"}}, "required": ["b"], "type": "object", @@ -521,17 +533,21 @@ def base_tool(param_a: TypeA, param_b: TypeB, param_c: TypeC) -> str: transform_args={"param_b": ArgTransform(hide=True, default=TypeB(b=42))}, ) - assert "$defs" not in transform2.parameters + # TypeB should be pruned from $defs, only TypeA remains + assert "TypeB" not in transform2.parameters.get("$defs", {}) assert transform2.parameters == snapshot( { "type": "object", "properties": { - "param_a": { + "param_a": {"$ref": "#/$defs/TypeA"}, + }, + "$defs": { + "TypeA": { "properties": {"a": {"type": "string"}}, "required": ["a"], "type": "object", - } + }, }, "required": ["param_a"], "additionalProperties": False, diff --git a/tests/tools/tool_transform/test_tool_transform.py b/tests/tools/tool_transform/test_tool_transform.py index bc3247323f..47ab1853b2 100644 --- a/tests/tools/tool_transform/test_tool_transform.py +++ b/tests/tools/tool_transform/test_tool_transform.py @@ -205,10 +205,11 @@ def tool_with_refs(a: VisibleType, b: HiddenType | None = None) -> int: schema = new_tool.parameters # Only 'a' should be visible assert list(schema["properties"].keys()) == ["a"] - # Schema should be fully dereferenced (no $defs) - assert "$defs" not in schema - # VisibleType should be inlined in the property 
- assert schema["properties"]["a"] == { + # HiddenType should be pruned from $defs + assert "HiddenType" not in schema.get("$defs", {}) + # VisibleType should remain in $defs and be referenced via $ref + assert schema["properties"]["a"] == {"$ref": "#/$defs/VisibleType"} + assert schema["$defs"]["VisibleType"] == { "properties": {"x": {"type": "integer"}}, "required": ["x"], "type": "object", @@ -396,10 +397,8 @@ def parent_tool(cool_model: CoolModel) -> int: new_tool = Tool.from_tool(tool) - # Both tools should have the same dereferenced schema + # Both tools should have the same schema (with $ref/$defs preserved) assert new_tool.parameters == tool.parameters - # Schema should be fully dereferenced (no $defs) - assert "$defs" not in new_tool.parameters def test_transform_args_validation_unknown_arg(add_tool): diff --git a/tests/utilities/openapi/test_schemas.py b/tests/utilities/openapi/test_schemas.py index 54644bce6e..ceac2bdd67 100644 --- a/tests/utilities/openapi/test_schemas.py +++ b/tests/utilities/openapi/test_schemas.py @@ -581,7 +581,7 @@ def test_request_body_multiple_content_types(self): ) # Should have some properties from one of the content types def test_oneof_reference_dereferenced(self): - """Test that schemas referenced in oneOf are dereferenced.""" + """Test that schemas referenced in oneOf are preserved and unused defs pruned.""" schema = { "type": "object", @@ -594,14 +594,15 @@ def test_oneof_reference_dereferenced(self): result = compress_schema(schema) - # $defs should be removed (all refs dereferenced) - assert "$defs" not in result + # UnusedSchema should be pruned, TestSchema should be kept + assert "UnusedSchema" not in result.get("$defs", {}) + assert result["$defs"]["TestSchema"] == {"type": "string"} - # TestSchema should be inlined in oneOf - assert result["properties"]["data"]["oneOf"] == [{"type": "string"}] + # $ref should be preserved in oneOf + assert result["properties"]["data"]["oneOf"] == [{"$ref": "#/$defs/TestSchema"}] def 
test_anyof_reference_dereferenced(self): - """Test that schemas referenced in anyOf are dereferenced.""" + """Test that schemas referenced in anyOf are preserved and unused defs pruned.""" schema = { "type": "object", @@ -614,14 +615,15 @@ def test_anyof_reference_dereferenced(self): result = compress_schema(schema) - # $defs should be removed (all refs dereferenced) - assert "$defs" not in result + # UnusedSchema should be pruned, TestSchema should be kept + assert "UnusedSchema" not in result.get("$defs", {}) + assert result["$defs"]["TestSchema"] == {"type": "string"} - # TestSchema should be inlined in anyOf - assert result["properties"]["data"]["anyOf"] == [{"type": "string"}] + # $ref should be preserved in anyOf + assert result["properties"]["data"]["anyOf"] == [{"$ref": "#/$defs/TestSchema"}] def test_allof_reference_dereferenced(self): - """Test that schemas referenced in allOf are dereferenced.""" + """Test that schemas referenced in allOf are preserved and unused defs pruned.""" schema = { "type": "object", @@ -634,8 +636,9 @@ def test_allof_reference_dereferenced(self): result = compress_schema(schema) - # $defs should be removed (all refs dereferenced) - assert "$defs" not in result + # UnusedSchema should be pruned, TestSchema should be kept + assert "UnusedSchema" not in result.get("$defs", {}) + assert result["$defs"]["TestSchema"] == {"type": "string"} - # TestSchema should be inlined in allOf - assert result["properties"]["data"]["allOf"] == [{"type": "string"}] + # $ref should be preserved in allOf + assert result["properties"]["data"]["allOf"] == [{"$ref": "#/$defs/TestSchema"}] diff --git a/tests/utilities/test_inspect.py b/tests/utilities/test_inspect.py index 8df5064668..448e1eb038 100644 --- a/tests/utilities/test_inspect.py +++ b/tests/utilities/test_inspect.py @@ -281,10 +281,8 @@ async def test_inspect_respects_tag_filtering(self): components weren't actually available to clients. 
""" # Create server with include_tags that will filter out untagged components - mcp = FastMCP( - "FilteredServer", - include_tags={"fetch", "analyze", "create"}, - ) + mcp = FastMCP("FilteredServer") + mcp.enable(tags={"fetch", "analyze", "create"}, only=True) # Add tools with and without matching tags @mcp.tool(tags={"fetch"}) @@ -396,7 +394,8 @@ def blocked_prompt() -> list: return [{"role": "user", "content": "blocked"}] # Create parent server with tag filtering - parent = FastMCP("ParentServer", include_tags={"allowed"}) + parent = FastMCP("ParentServer") + parent.enable(tags={"allowed"}, only=True) parent.mount(mounted) # Get inspect info @@ -448,7 +447,8 @@ def untagged_tool() -> str: return "untagged" # Create parent with exclude_tags - should filter mounted components - parent = FastMCP("ParentServer", exclude_tags={"development"}) + parent = FastMCP("ParentServer") + parent.disable(tags={"development"}) parent.mount(mounted) # Get inspect info diff --git a/tests/utilities/test_json_schema.py b/tests/utilities/test_json_schema.py index 436beb6a29..a337156d66 100644 --- a/tests/utilities/test_json_schema.py +++ b/tests/utilities/test_json_schema.py @@ -196,8 +196,8 @@ def test_preserves_nested_siblings(self): class TestCompressSchema: """Tests for the compress_schema function.""" - def test_dereferences_by_default(self): - """Test that compress_schema dereferences $refs by default.""" + def test_preserves_refs_by_default(self): + """Test that compress_schema preserves $refs by default.""" schema = { "properties": { "foo": {"$ref": "#/$defs/foo_def"}, @@ -208,10 +208,9 @@ def test_dereferences_by_default(self): } result = compress_schema(schema) - # $ref should be inlined - assert result["properties"]["foo"] == {"type": "string"} - # $defs should be removed - assert "$defs" not in result + # $ref should be preserved (dereferencing is handled by middleware) + assert result["properties"]["foo"] == {"$ref": "#/$defs/foo_def"} + assert "$defs" in result def 
test_prune_params(self): """Test pruning parameters with compress_schema.""" @@ -271,7 +270,7 @@ def test_combined_operations(self): assert "remove" not in result["properties"] # Check that required list was updated assert result["required"] == ["keep"] - # Check that $defs was removed (dereferenced) + # All $defs entries are now unreferenced after pruning "remove", so they're cleaned up assert "$defs" not in result # Check that additionalProperties was removed assert "additionalProperties" not in result @@ -442,6 +441,46 @@ def test_mcp_client_compatibility_requires_additional_properties(self): ) +class TestCompressSchemaDereference: + """Tests for the dereference parameter of compress_schema.""" + + SCHEMA_WITH_REFS = { + "properties": { + "foo": {"$ref": "#/$defs/foo_def"}, + }, + "$defs": { + "foo_def": {"type": "string"}, + }, + } + + def test_dereference_true_inlines_refs(self): + result = compress_schema(self.SCHEMA_WITH_REFS, dereference=True) + assert result["properties"]["foo"] == {"type": "string"} + assert "$defs" not in result + + def test_dereference_false_preserves_refs(self): + result = compress_schema(self.SCHEMA_WITH_REFS, dereference=False) + assert result["properties"]["foo"] == {"$ref": "#/$defs/foo_def"} + assert "$defs" in result + + def test_other_optimizations_still_apply_without_dereference(self): + schema = { + "properties": { + "foo": {"$ref": "#/$defs/foo_def"}, + "bar": {"type": "integer", "title": "Bar"}, + }, + "$defs": { + "foo_def": {"type": "string"}, + }, + } + result = compress_schema( + schema, dereference=False, prune_params=["bar"], prune_titles=True + ) + assert "bar" not in result["properties"] + assert "$ref" in result["properties"]["foo"] + assert "$defs" in result + + class TestResolveRootRef: """Tests for the resolve_root_ref function. 
diff --git a/uv.lock b/uv.lock index 07746c7cbf..512bcd5ea1 100644 --- a/uv.lock +++ b/uv.lock @@ -97,6 +97,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, ] +[[package]] +name = "azure-core" +version = "1.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" }, +] + +[[package]] +name = "azure-identity" +version = "1.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = 
"sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" }, +] + [[package]] name = "backports-asyncio-runner" version = "1.2.0" @@ -449,62 +478,62 @@ toml = [ [[package]] name = "cryptography" -version = "46.0.4" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/19/f748958276519adf6a0c1e79e7b8860b4830dda55ccdf29f2719b5fc499c/cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59", size = 749301, upload-time = "2026-01-28T00:24:37.379Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/99/157aae7949a5f30d51fcb1a9851e8ebd5c74bf99b5285d8bb4b8b9ee641e/cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485", size = 7173686, upload-time = "2026-01-28T00:23:07.515Z" }, - { url = "https://files.pythonhosted.org/packages/87/91/874b8910903159043b5c6a123b7e79c4559ddd1896e38967567942635778/cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc", size = 4275871, upload-time = "2026-01-28T00:23:09.439Z" }, - { url = "https://files.pythonhosted.org/packages/c0/35/690e809be77896111f5b195ede56e4b4ed0435b428c2f2b6d35046fbb5e8/cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0", size = 4423124, upload-time = "2026-01-28T00:23:11.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/5b/a26407d4f79d61ca4bebaa9213feafdd8806dc69d3d290ce24996d3cfe43/cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa", size = 4277090, upload-time = "2026-01-28T00:23:13.123Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d8/4bb7aec442a9049827aa34cee1aa83803e528fa55da9a9d45d01d1bb933e/cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81", size = 4947652, upload-time = "2026-01-28T00:23:14.554Z" }, - { url = "https://files.pythonhosted.org/packages/2b/08/f83e2e0814248b844265802d081f2fac2f1cbe6cd258e72ba14ff006823a/cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255", size = 4455157, upload-time = "2026-01-28T00:23:16.443Z" }, - { url = "https://files.pythonhosted.org/packages/0a/05/19d849cf4096448779d2dcc9bb27d097457dac36f7273ffa875a93b5884c/cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e", size = 3981078, upload-time = "2026-01-28T00:23:17.838Z" }, - { url = "https://files.pythonhosted.org/packages/e6/89/f7bac81d66ba7cde867a743ea5b37537b32b5c633c473002b26a226f703f/cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c", size = 4276213, upload-time = "2026-01-28T00:23:19.257Z" }, - { url = "https://files.pythonhosted.org/packages/da/9f/7133e41f24edd827020ad21b068736e792bc68eecf66d93c924ad4719fb3/cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32", size = 4912190, upload-time = "2026-01-28T00:23:21.244Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/f7/6d43cbaddf6f65b24816e4af187d211f0bc536a29961f69faedc48501d8e/cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616", size = 4454641, upload-time = "2026-01-28T00:23:22.866Z" }, - { url = "https://files.pythonhosted.org/packages/9e/4f/ebd0473ad656a0ac912a16bd07db0f5d85184924e14fc88feecae2492834/cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0", size = 4405159, upload-time = "2026-01-28T00:23:25.278Z" }, - { url = "https://files.pythonhosted.org/packages/d1/f7/7923886f32dc47e27adeff8246e976d77258fd2aa3efdd1754e4e323bf49/cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0", size = 4666059, upload-time = "2026-01-28T00:23:26.766Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a7/0fca0fd3591dffc297278a61813d7f661a14243dd60f499a7a5b48acb52a/cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5", size = 3026378, upload-time = "2026-01-28T00:23:28.317Z" }, - { url = "https://files.pythonhosted.org/packages/2d/12/652c84b6f9873f0909374864a57b003686c642ea48c84d6c7e2c515e6da5/cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b", size = 3478614, upload-time = "2026-01-28T00:23:30.275Z" }, - { url = "https://files.pythonhosted.org/packages/b9/27/542b029f293a5cce59349d799d4d8484b3b1654a7b9a0585c266e974a488/cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908", size = 7116417, upload-time = "2026-01-28T00:23:31.958Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/f5/559c25b77f40b6bf828eabaf988efb8b0e17b573545edb503368ca0a2a03/cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da", size = 4264508, upload-time = "2026-01-28T00:23:34.264Z" }, - { url = "https://files.pythonhosted.org/packages/49/a1/551fa162d33074b660dc35c9bc3616fefa21a0e8c1edd27b92559902e408/cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829", size = 4409080, upload-time = "2026-01-28T00:23:35.793Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/4d8d129a755f5d6df1bbee69ea2f35ebfa954fa1847690d1db2e8bca46a5/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2", size = 4270039, upload-time = "2026-01-28T00:23:37.263Z" }, - { url = "https://files.pythonhosted.org/packages/4c/f5/ed3fcddd0a5e39321e595e144615399e47e7c153a1fb8c4862aec3151ff9/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085", size = 4926748, upload-time = "2026-01-28T00:23:38.884Z" }, - { url = "https://files.pythonhosted.org/packages/43/ae/9f03d5f0c0c00e85ecb34f06d3b79599f20630e4db91b8a6e56e8f83d410/cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b", size = 4442307, upload-time = "2026-01-28T00:23:40.56Z" }, - { url = "https://files.pythonhosted.org/packages/8b/22/e0f9f2dae8040695103369cf2283ef9ac8abe4d51f68710bec2afd232609/cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd", size = 3959253, upload-time = "2026-01-28T00:23:42.827Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/5b/6a43fcccc51dae4d101ac7d378a8724d1ba3de628a24e11bf2f4f43cba4d/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2", size = 4269372, upload-time = "2026-01-28T00:23:44.655Z" }, - { url = "https://files.pythonhosted.org/packages/17/b7/0f6b8c1dd0779df2b526e78978ff00462355e31c0a6f6cff8a3e99889c90/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e", size = 4891908, upload-time = "2026-01-28T00:23:46.48Z" }, - { url = "https://files.pythonhosted.org/packages/83/17/259409b8349aa10535358807a472c6a695cf84f106022268d31cea2b6c97/cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f", size = 4441254, upload-time = "2026-01-28T00:23:48.403Z" }, - { url = "https://files.pythonhosted.org/packages/9c/fe/e4a1b0c989b00cee5ffa0764401767e2d1cf59f45530963b894129fd5dce/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82", size = 4396520, upload-time = "2026-01-28T00:23:50.26Z" }, - { url = "https://files.pythonhosted.org/packages/b3/81/ba8fd9657d27076eb40d6a2f941b23429a3c3d2f56f5a921d6b936a27bc9/cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c", size = 4651479, upload-time = "2026-01-28T00:23:51.674Z" }, - { url = "https://files.pythonhosted.org/packages/00/03/0de4ed43c71c31e4fe954edd50b9d28d658fef56555eba7641696370a8e2/cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061", size = 3001986, upload-time = "2026-01-28T00:23:53.485Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/70/81830b59df7682917d7a10f833c4dab2a5574cd664e86d18139f2b421329/cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7", size = 3468288, upload-time = "2026-01-28T00:23:55.09Z" }, - { url = "https://files.pythonhosted.org/packages/56/f7/f648fdbb61d0d45902d3f374217451385edc7e7768d1b03ff1d0e5ffc17b/cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab", size = 7169583, upload-time = "2026-01-28T00:23:56.558Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cc/8f3224cbb2a928de7298d6ed4790f5ebc48114e02bdc9559196bfb12435d/cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef", size = 4275419, upload-time = "2026-01-28T00:23:58.364Z" }, - { url = "https://files.pythonhosted.org/packages/17/43/4a18faa7a872d00e4264855134ba82d23546c850a70ff209e04ee200e76f/cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d", size = 4419058, upload-time = "2026-01-28T00:23:59.867Z" }, - { url = "https://files.pythonhosted.org/packages/ee/64/6651969409821d791ba12346a124f55e1b76f66a819254ae840a965d4b9c/cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973", size = 4278151, upload-time = "2026-01-28T00:24:01.731Z" }, - { url = "https://files.pythonhosted.org/packages/20/0b/a7fce65ee08c3c02f7a8310cc090a732344066b990ac63a9dfd0a655d321/cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4", size = 4939441, upload-time = "2026-01-28T00:24:03.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/a7/20c5701e2cd3e1dfd7a19d2290c522a5f435dd30957d431dcb531d0f1413/cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af", size = 4451617, upload-time = "2026-01-28T00:24:05.403Z" }, - { url = "https://files.pythonhosted.org/packages/00/dc/3e16030ea9aa47b63af6524c354933b4fb0e352257c792c4deeb0edae367/cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263", size = 3977774, upload-time = "2026-01-28T00:24:06.851Z" }, - { url = "https://files.pythonhosted.org/packages/42/c8/ad93f14118252717b465880368721c963975ac4b941b7ef88f3c56bf2897/cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095", size = 4277008, upload-time = "2026-01-28T00:24:08.926Z" }, - { url = "https://files.pythonhosted.org/packages/00/cf/89c99698151c00a4631fbfcfcf459d308213ac29e321b0ff44ceeeac82f1/cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b", size = 4903339, upload-time = "2026-01-28T00:24:12.009Z" }, - { url = "https://files.pythonhosted.org/packages/03/c3/c90a2cb358de4ac9309b26acf49b2a100957e1ff5cc1e98e6c4996576710/cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019", size = 4451216, upload-time = "2026-01-28T00:24:13.975Z" }, - { url = "https://files.pythonhosted.org/packages/96/2c/8d7f4171388a10208671e181ca43cdc0e596d8259ebacbbcfbd16de593da/cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4", size = 4404299, upload-time = "2026-01-28T00:24:16.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/e9/23/cbb2036e450980f65c6e0a173b73a56ff3bccd8998965dea5cc9ddd424a5/cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b", size = 4664837, upload-time = "2026-01-28T00:24:17.629Z" }, - { url = "https://files.pythonhosted.org/packages/0a/21/f7433d18fe6d5845329cbdc597e30caf983229c7a245bcf54afecc555938/cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc", size = 3009779, upload-time = "2026-01-28T00:24:20.198Z" }, - { url = "https://files.pythonhosted.org/packages/3a/6a/bd2e7caa2facffedf172a45c1a02e551e6d7d4828658c9a245516a598d94/cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976", size = 3466633, upload-time = "2026-01-28T00:24:21.851Z" }, - { url = "https://files.pythonhosted.org/packages/59/e0/f9c6c53e1f2a1c2507f00f2faba00f01d2f334b35b0fbfe5286715da2184/cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b", size = 3476316, upload-time = "2026-01-28T00:24:24.144Z" }, - { url = "https://files.pythonhosted.org/packages/27/7a/f8d2d13227a9a1a9fe9c7442b057efecffa41f1e3c51d8622f26b9edbe8f/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da", size = 4216693, upload-time = "2026-01-28T00:24:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/c5/de/3787054e8f7972658370198753835d9d680f6cd4a39df9f877b57f0dd69c/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80", size = 4382765, upload-time = "2026-01-28T00:24:27.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/5f/60e0afb019973ba6a0b322e86b3d61edf487a4f5597618a430a2a15f2d22/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822", size = 4216066, upload-time = "2026-01-28T00:24:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/81/8e/bf4a0de294f147fee66f879d9bae6f8e8d61515558e3d12785dd90eca0be/cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947", size = 4382025, upload-time = "2026-01-28T00:24:30.681Z" }, - { url = "https://files.pythonhosted.org/packages/79/f4/9ceb90cfd6a3847069b0b0b353fd3075dc69b49defc70182d8af0c4ca390/cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3", size = 3406043, upload-time = "2026-01-28T00:24:32.236Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, ] [[package]] @@ -710,6 +739,9 @@ anthropic = [ apps = [ { name = "prefab-ui" }, ] +azure = [ + { name = "azure-identity" }, +] openai = [ { name = "openai" }, ] @@ -721,7 +753,7 @@ tasks = [ dev = [ { name = "dirty-equals" }, { name = "fastapi" }, - { name = "fastmcp", extra = ["anthropic", "apps", "openai", "tasks"] }, + { name = "fastmcp", extra = ["anthropic", "apps", "azure", "openai", "tasks"] }, { name = "inline-snapshot", extra = ["dirty-equals"] }, { name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "ipython", version = "9.9.0", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.11'" }, @@ -751,6 +783,7 @@ dev = [ requires-dist = [ { name = "anthropic", marker = "extra == 'anthropic'", specifier = ">=0.40.0" }, { name = "authlib", specifier = ">=1.6.5" }, + { name = "azure-identity", marker = "extra == 'azure'", specifier = ">=1.16.0" }, { name = "cyclopts", specifier = ">=4.0.0" }, { name = "exceptiongroup", specifier = ">=1.2.2" }, { name = "httpx", specifier = ">=0.28.1,<1.0" }, @@ -763,7 +796,7 @@ requires-dist = [ { name = "packaging", specifier = ">=24.0" }, { name = "platformdirs", specifier = ">=4.0.0" }, { name = "prefab-ui", marker = "extra == 'apps'", editable = "../prefab" }, - { name = "py-key-value-aio", extras = ["disk", "keyring", "memory"], specifier = ">=0.3.0,<0.4.0" }, + { name = "py-key-value-aio", extras = ["disk", "keyring", "memory"], specifier = ">=0.4.0,<0.5.0" }, { name = "pydantic", extras = ["email"], specifier = ">=2.11.7" }, { name = "pydocket", marker = "extra == 'tasks'", specifier = ">=0.17.2" }, { name = "pyperclip", specifier = ">=1.9.0" }, @@ -774,13 +807,13 @@ requires-dist = [ { name = "watchfiles", specifier = ">=1.0.0" }, { name = "websockets", specifier = ">=15.0.1" }, ] -provides-extras = ["anthropic", "apps", "openai", "tasks"] +provides-extras = ["anthropic", "apps", "azure", "openai", "tasks"] [package.metadata.requires-dev] dev = [ { name = "dirty-equals", specifier = ">=0.9.0" }, { name = "fastapi", specifier = ">=0.115.12" }, - { name = "fastmcp", extras = ["anthropic", "apps", "openai", "tasks"] }, + { name = "fastmcp", extras = ["anthropic", "apps", "azure", "openai", "tasks"] }, { name = "inline-snapshot", extras = ["dirty-equals"], specifier = ">=0.27.2" }, { name = "ipython", specifier = ">=8.12.3" }, { name = "loq", specifier = ">=0.1.0a3" }, @@ -1417,6 +1450,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = 
"sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] +[[package]] +name = "msal" +version = "1.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, +] + +[[package]] +name = "msal-extensions" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, +] + [[package]] name = "openai" version = "2.16.0" @@ -1746,15 +1805,15 @@ wheels = [ [[package]] name = "py-key-value-aio" -version = "0.3.0" +version = "0.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beartype" }, - { name = "py-key-value-shared" }, + { name 
= "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/ce/3136b771dddf5ac905cc193b461eb67967cf3979688c6696e1f2cdcde7ea/py_key_value_aio-0.3.0.tar.gz", hash = "sha256:858e852fcf6d696d231266da66042d3355a7f9871650415feef9fca7a6cd4155", size = 50801, upload-time = "2025-11-17T16:50:04.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/42/4397b26c564a7428fbb424c353fc416c5954609c149b6d629255f65e6dc9/py_key_value_aio-0.4.0.tar.gz", hash = "sha256:55be4942bf5d5a40aa9d6eae443425096fe1bec6af7571502e54240ce3597189", size = 89104, upload-time = "2026-02-10T23:05:51.35Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/99/10/72f6f213b8f0bce36eff21fda0a13271834e9eeff7f9609b01afdc253c79/py_key_value_aio-0.3.0-py3-none-any.whl", hash = "sha256:1c781915766078bfd608daa769fefb97e65d1d73746a3dfb640460e322071b64", size = 96342, upload-time = "2025-11-17T16:50:03.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/34/83fb1612bfdd68ef6a47b036dd0f906f943dac10844b43242a5c39395013/py_key_value_aio-0.4.0-py3-none-any.whl", hash = "sha256:962fe40cb763b2853a8f7484e9271dcbd8bf41679f4c391e54bfee4a7ca89c84", size = 148756, upload-time = "2026-02-10T23:05:50.342Z" }, ] [package.optional-dependencies] @@ -1772,19 +1831,6 @@ redis = [ { name = "redis" }, ] -[[package]] -name = "py-key-value-shared" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beartype" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7b/e4/1971dfc4620a3a15b4579fe99e024f5edd6e0967a71154771a059daff4db/py_key_value_shared-0.3.0.tar.gz", hash = "sha256:8fdd786cf96c3e900102945f92aa1473138ebe960ef49da1c833790160c28a4b", size = 11666, upload-time = "2025-11-17T16:50:06.849Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e4/b8b0a03ece72f47dce2307d36e1c34725b7223d209fc679315ffe6a4e2c3/py_key_value_shared-0.3.0-py3-none-any.whl", hash = 
"sha256:5b0efba7ebca08bb158b1e93afc2f07d30b8f40c2fc12ce24a4c0d84f42f9298", size = 19560, upload-time = "2025-11-17T16:50:05.954Z" }, -] - [[package]] name = "pycparser" version = "3.0" diff --git a/v3-notes/get-methods-consolidation.md b/v3-notes/get-methods-consolidation.md index 6a67487e4e..70b64c9c7c 100644 --- a/v3-notes/get-methods-consolidation.md +++ b/v3-notes/get-methods-consolidation.md @@ -1,6 +1,6 @@ -# Consolidating get_* and _list_* Methods +# Consolidating Discovery Methods -This document captures the design decision to consolidate component listing methods in FastMCP 3.0. +This document captures the design decisions around component listing methods in FastMCP 3.0. ## Problem @@ -14,20 +14,16 @@ These were nearly identical but with subtle differences in dedup keys, logging, ## Solution -`get_*` is now the canonical method. The `_list_*` methods were deleted entirely. +The duplicate methods were consolidated into a single set of `list_*` methods. The old `get_*` plural methods and `_list_*` internal methods were both removed. + +This happened in two phases: + +1. **Consolidation** (Dec 2025): Merged `get_*` and `_list_*` into a single `get_*` method with an `apply_middleware` parameter. +2. **Rename** (Jan 2026): When `FastMCP` was refactored to inherit from `Provider`, the methods were renamed to `list_*` to align with the `Provider` interface. The `apply_middleware` parameter was renamed to `run_middleware` with a default of `True`. ```python -async def get_tools(self, *, apply_middleware: bool = False) -> list[Tool]: +async def list_tools(self, *, run_middleware: bool = True) -> Sequence[Tool]: """Canonical method for listing tools.""" - if apply_middleware: - # Apply middleware chain (for MCP protocol handlers) - mw_context = MiddlewareContext(...) 
- return await self._apply_middleware( - context=mw_context, - call_next=lambda context: self.get_tools(apply_middleware=False) - ) - - # Core implementation: query providers, dedupe, filter visibility ... ``` @@ -38,32 +34,28 @@ async def get_tools(self, *, apply_middleware: bool = False) -> list[Tool]: The dict return type was removed because the key was redundantβ€”components already have `.name` or `.uri` attributes. ```python -# Before +# Before (v2.x) tools = await server.get_tools() tool = tools["my_tool"] -# After -tools = await server.get_tools() +# After (v3.0) +tools = await server.list_tools() tool = next(t for t in tools if t.name == "my_tool") ``` ### Middleware via Parameter -The `apply_middleware=True` parameter applies the middleware chain. This replaces the separate `_list_*_middleware()` methods. - -Callers: -- MCP protocol handlers: `get_tools(apply_middleware=True)` -- Direct access: `get_tools()` (default False) +The `run_middleware=True` parameter (default) applies the middleware chain. This replaces the separate `_list_*_middleware()` methods. ## Benefits 1. **Single source of truth** - One method, not two 2. **Consistent behavior** - Same dedup key, same visibility filtering 3. **Clearer API** - Public method with explicit middleware opt-in -4. **Less code** - Deleted ~200 lines of duplicate implementation +4. **Provider alignment** - `FastMCP.list_tools()` overrides `Provider.list_tools()` +5. **Less code** - Deleted ~200 lines of duplicate implementation ## Implementation Files -- `src/fastmcp/server/server.py` - Canonical `get_*` methods -- `src/fastmcp/server/providers/fastmcp_provider.py` - Uses `apply_middleware=True` -- `src/fastmcp/utilities/inspect.py` - Uses `apply_middleware=True` +- `src/fastmcp/server/server.py` - Canonical `list_*` methods +- `src/fastmcp/server/providers/` - Provider base class defines the interface