diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 548696b55a..c58a64632c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,7 +26,7 @@ repos: hooks: - id: ty name: ty check - entry: uv run ty check + entry: uv run --isolated ty check language: system types: [python] files: ^src/|^tests/ diff --git a/docs/clients/tasks.mdx b/docs/clients/tasks.mdx new file mode 100644 index 0000000000..59893957d4 --- /dev/null +++ b/docs/clients/tasks.mdx @@ -0,0 +1,155 @@ +--- +title: Background Tasks +sidebarTitle: Background Tasks +description: Execute operations asynchronously and track their progress +icon: clock +tag: "NEW" +--- + +import { VersionBadge } from "/snippets/version-badge.mdx" + + + +The [MCP task protocol](https://modelcontextprotocol.io/specification/2025-11-25/basic/utilities/tasks) lets you request operations to run asynchronously. This returns a Task object immediately, letting you track progress, cancel operations, or await results. + +See [Server Background Tasks](/servers/tasks) for how to enable this on the server side. + +## Requesting Background Execution + +Pass `task=True` to run an operation as a background task: + +```python +from fastmcp import Client + +async with Client(server) as client: + # Start a background task + task = await client.call_tool("slow_computation", {"duration": 10}, task=True) + + print(f"Task started: {task.task_id}") + + # Do other work while it runs... 
+ + # Get the result when ready + result = await task.result() +``` + +This works with all three operation types: + +```python +# Tools +tool_task = await client.call_tool("my_tool", args, task=True) + +# Resources +resource_task = await client.read_resource("file://large.txt", task=True) + +# Prompts +prompt_task = await client.get_prompt("my_prompt", args, task=True) +``` + +## Task Objects + +All task types share a common interface: + +### Getting Results + +```python +task = await client.call_tool("analyze", {"text": "hello"}, task=True) + +# Wait for and get the result +result = await task.result() + +# Or use await directly (shorthand for .result()) +result = await task +``` + +### Checking Status + +```python +status = await task.status() + +print(f"Status: {status.status}") # "working", "completed", "failed", "cancelled" +print(f"Message: {status.statusMessage}") # Progress message from server +``` + +### Waiting for Completion + +```python +# Wait for task to complete (with timeout) +status = await task.wait(timeout=30.0) + +# Wait for a specific state +status = await task.wait(state="completed", timeout=30.0) +``` + +### Cancelling Tasks + +```python +await task.cancel() +``` + +### Status Notifications + +Register callbacks to receive real-time status updates: + +```python +def on_status_change(status): + print(f"Task {status.taskId}: {status.status} - {status.statusMessage}") + +task.on_status_change(on_status_change) + +# Async callbacks also supported +async def on_status_async(status): + await log_status(status) + +task.on_status_change(on_status_async) +``` + +## Graceful Degradation + +You can always pass `task=True` regardless of whether the server supports background tasks. Per the [MCP specification](https://modelcontextprotocol.io/specification/2025-11-25/basic/utilities/tasks), servers that don't support tasks will execute the operation immediately and return the result inline. 
Your code works either way: + +```python +task = await client.call_tool("my_tool", args, task=True) + +if task.returned_immediately: + print("Server executed immediately (no background support)") +else: + print("Running in background") + +# Either way, this works +result = await task.result() +``` + +This means you can write task-aware client code without worrying about server capabilities—the Task API provides a consistent interface whether the operation runs in the background or completes immediately. + +## Complete Example + +```python +import asyncio +from fastmcp import Client + +async def main(): + async with Client(server) as client: + # Start background task + task = await client.call_tool( + "slow_computation", + {"duration": 10}, + task=True, + ) + + # Subscribe to updates + def on_update(status): + print(f"Progress: {status.statusMessage}") + + task.on_status_change(on_update) + + # Do other work + print("Doing other work while task runs...") + await asyncio.sleep(2) + + # Wait for completion and get result + result = await task.result() + print(f"Result: {result.data}") + +asyncio.run(main()) +``` diff --git a/docs/docs.json b/docs/docs.json index b206966510..53ab8b9e8c 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -116,7 +116,8 @@ "servers/progress", "servers/proxy", "servers/sampling", - "servers/storage-backends" + "servers/storage-backends", + "servers/tasks" ] }, { @@ -171,6 +172,7 @@ "clients/logging", "clients/progress", "clients/sampling", + "clients/tasks", "clients/messages", "clients/roots" ] diff --git a/docs/servers/context.mdx b/docs/servers/context.mdx index 4abd6dc900..6d2d4e692a 100644 --- a/docs/servers/context.mdx +++ b/docs/servers/context.mdx @@ -8,6 +8,10 @@ import { VersionBadge } from '/snippets/version-badge.mdx' When defining FastMCP [tools](/servers/tools), [resources](/servers/resources), resource templates, or [prompts](/servers/prompts), your functions might need to interact with the underlying MCP session or access 
advanced server capabilities. FastMCP provides the `Context` object for this purpose. + +FastMCP uses [Docket](https://github.com/chrisguidry/docket)'s dependency injection system for managing runtime dependencies. This page covers Context and the built-in dependencies; see [Custom Dependencies](#custom-dependencies) for creating your own. + + ## What Is Context? The `Context` object provides a clean interface to access MCP features within your functions, including: @@ -24,80 +28,73 @@ The `Context` object provides a clean interface to access MCP features within yo ## Accessing the Context -### Via Dependency Injection - -To use the context object within any of your functions, simply add a parameter to your function signature and type-hint it as `Context`. FastMCP will automatically inject the context instance when your function is called. + -**Key Points:** - -- The parameter name (e.g., `ctx`, `context`) doesn't matter, only the type hint `Context` is important. -- The context parameter can be placed anywhere in your function's signature; it will not be exposed to MCP clients as a valid parameter. -- The context is optional - functions that don't need it can omit the parameter entirely. -- Context methods are async, so your function usually needs to be async as well. -- The type hint can be a union (`Context | None`) or use `Annotated[]` and it will still work properly. -- **Each MCP request receives a new context object.** Context is scoped to a single request; state or data set in one request will not be available in subsequent requests. -- Context is only available during a request; attempting to use context methods outside a request will raise errors. If you need to debug or call your context methods outside of a request, you can type your variable as `Context | None=None` to avoid missing argument errors. 
- -#### Tools +The preferred way to access context is using the `CurrentContext()` dependency: ```python {1, 6} -from fastmcp import FastMCP, Context +from fastmcp import FastMCP +from fastmcp.dependencies import CurrentContext +from fastmcp.server.context import Context mcp = FastMCP(name="Context Demo") @mcp.tool -async def process_file(file_uri: str, ctx: Context) -> str: +async def process_file(file_uri: str, ctx: Context = CurrentContext()) -> str: """Processes a file, using context for logging and resource access.""" - # Context is available as the ctx parameter + await ctx.info(f"Processing {file_uri}") return "Processed file" ``` -#### Resources and Templates - - +This works with tools, resources, and prompts: -```python {1, 6, 12} -from fastmcp import FastMCP, Context +```python +from fastmcp import FastMCP +from fastmcp.dependencies import CurrentContext +from fastmcp.server.context import Context mcp = FastMCP(name="Context Demo") @mcp.resource("resource://user-data") -async def get_user_data(ctx: Context) -> dict: - """Fetch personalized user data based on the request context.""" - # Context is available as the ctx parameter +async def get_user_data(ctx: Context = CurrentContext()) -> dict: + await ctx.debug("Fetching user data") return {"user_id": "example"} -@mcp.resource("resource://users/{user_id}/profile") -async def get_user_profile(user_id: str, ctx: Context) -> dict: - """Fetch user profile with context-aware logging.""" - # Context is available as the ctx parameter - return {"id": user_id} +@mcp.prompt +async def data_analysis_request(dataset: str, ctx: Context = CurrentContext()) -> str: + return f"Please analyze the following dataset: {dataset}" ``` -#### Prompts +**Key Points:** - +- Dependency parameters are automatically excluded from the MCP schema—clients never see them. +- Context methods are async, so your function usually needs to be async as well. 
+- **Each MCP request receives a new context object.** Context is scoped to a single request; state or data set in one request will not be available in subsequent requests. +- Context is only available during a request; attempting to use context methods outside a request will raise errors. + +### Legacy Type-Hint Injection + +For backwards compatibility, you can still access context by simply adding a parameter with the `Context` type hint. FastMCP will automatically inject the context instance: ```python {1, 6} from fastmcp import FastMCP, Context mcp = FastMCP(name="Context Demo") -@mcp.prompt -async def data_analysis_request(dataset: str, ctx: Context) -> str: - """Generate a request to analyze data with contextual information.""" - # Context is available as the ctx parameter - return f"Please analyze the following dataset: {dataset}" +@mcp.tool +async def process_file(file_uri: str, ctx: Context) -> str: + """Processes a file, using context for logging and resource access.""" + # Context is injected automatically based on the type hint + return "Processed file" ``` +This approach still works for tools, resources, and prompts. The parameter name doesn't matter—only the `Context` type hint is important. The type hint can also be a union (`Context | None`) or use `Annotated[]`. -### Via Runtime Dependency Function +### Via `get_context()` Function -While the simplest way to access context is through function parameter injection as shown above, there are cases where you need to access the context in code that may not be easy to modify to accept a context parameter, or that is nested deeper within your function calls. 
- -FastMCP provides dependency functions that allow you to retrieve the active context from anywhere within a server request's execution flow: +For code nested deeper within your function calls where passing context through parameters is inconvenient, use `get_context()` to retrieve the active context from anywhere within a request's execution flow: ```python {2,9} from fastmcp import FastMCP @@ -108,9 +105,9 @@ mcp = FastMCP(name="Dependency Demo") # Utility function that needs context but doesn't receive it as a parameter async def process_data(data: list[float]) -> dict: # Get the active context - only works when called within a request - ctx = get_context() + ctx = get_context() await ctx.info(f"Processing {len(data)} data points") - + @mcp.tool async def analyze_dataset(dataset_name: str) -> dict: # Call utility function that uses context internally @@ -120,8 +117,8 @@ async def analyze_dataset(dataset_name: str) -> dict: **Important Notes:** -- The `get_context` function should only be used within the context of a server request. Calling it outside of a request will raise a `RuntimeError`. -- The `get_context` function is server-only and should not be used in client code. +- The `get_context()` function should only be used within the context of a server request. Calling it outside of a request will raise a `RuntimeError`. +- The `get_context()` function is server-only and should not be used in client code. ## Context Capabilities @@ -527,3 +524,90 @@ async def get_tenant_data(resource_id: str) -> dict: "data": f"Tenant-specific data for {tenant_id}", } ``` + +## Custom Dependencies + + + +FastMCP's dependency injection is powered by [Docket](https://github.com/chrisguidry/docket), which provides a flexible system for injecting values into your functions. Beyond the built-in dependencies like `CurrentContext()`, you can create your own. + +### Using `Depends()` + +The simplest way to create a custom dependency is with `Depends()`. 
Pass any callable (sync or async function, or async context manager) and its return value will be injected: + +```python +from contextlib import asynccontextmanager +from fastmcp import FastMCP +from fastmcp.dependencies import Depends + +mcp = FastMCP(name="Custom Deps Demo") + +# Simple function dependency +def get_config() -> dict: + return {"api_url": "https://api.example.com", "timeout": 30} + +# Async function dependency +async def get_user_id() -> int: + return 42 + +@mcp.tool +async def fetch_data( + query: str, + config: dict = Depends(get_config), + user_id: int = Depends(get_user_id), +) -> str: + return f"User {user_id} fetching '{query}' from {config['api_url']}" +``` + +Dependencies using `Depends()` are automatically excluded from the MCP schema—clients never see them as parameters. + +### Resource Management with Context Managers + +For dependencies that need cleanup (database connections, file handles, etc.), use an async context manager: + +```python +from contextlib import asynccontextmanager +from fastmcp import FastMCP +from fastmcp.dependencies import Depends + +mcp = FastMCP(name="Resource Demo") + +@asynccontextmanager +async def get_database(): + db = await connect_to_database() + try: + yield db + finally: + await db.close() + +@mcp.tool +async def query_users(sql: str, db = Depends(get_database)) -> list: + return await db.execute(sql) +``` + +The context manager's cleanup code runs after your function completes, even if an error occurs. 
+ +### Nested Dependencies + +Dependencies can depend on other dependencies: + +```python +from fastmcp import FastMCP +from fastmcp.dependencies import Depends + +mcp = FastMCP(name="Nested Demo") + +def get_base_url() -> str: + return "https://api.example.com" + +def get_api_client(base_url: str = Depends(get_base_url)) -> dict: + return {"base_url": base_url, "version": "v1"} + +@mcp.tool +async def call_api(endpoint: str, client: dict = Depends(get_api_client)) -> str: + return f"Calling {client['base_url']}/{client['version']}/{endpoint}" +``` + +### Advanced: Subclassing `Dependency` + +For more complex dependency patterns—like dependencies that need access to Docket's execution context or require custom lifecycle management—you can subclass Docket's `Dependency` class. See the [Docket documentation on dependencies](https://chrisguidry.github.io/docket/dependencies/) for details. diff --git a/docs/servers/tasks.mdx b/docs/servers/tasks.mdx new file mode 100644 index 0000000000..2ca24948eb --- /dev/null +++ b/docs/servers/tasks.mdx @@ -0,0 +1,130 @@ +--- +title: Background Tasks +sidebarTitle: Background Tasks +description: Run long-running operations asynchronously with progress tracking +icon: clock +tag: "NEW" +--- + +import { VersionBadge } from "/snippets/version-badge.mdx" + + + +Background tasks allow tools, resources, and prompts to execute asynchronously, returning immediately while work continues in the background. Clients can track progress, cancel operations, and retrieve results when ready. + +This implements the [MCP task protocol](https://modelcontextprotocol.io/specification/2025-11-25/basic/utilities/tasks) from the MCP specification, powered by [Docket](https://github.com/chrisguidry/docket) for task queue management. + +## Requirements + +For **single-process** deployments, everything works out of the box using an in-memory backend. + +For **multi-process** deployments (multiple workers, distributed systems), you'll need Redis or Valkey. 
See the [Docket documentation](https://chrisguidry.github.io/docket/) for backend configuration details. + +## Enabling Background Tasks + +Add `task=True` to any tool, resource, or prompt decorator: + +```python +import asyncio +from fastmcp import FastMCP +from fastmcp.dependencies import Progress + +mcp = FastMCP("MyServer") + +@mcp.tool(task=True) +async def slow_computation(duration: int, progress: Progress = Progress()) -> str: + """A long-running operation with progress tracking.""" + await progress.set_total(duration) + + for i in range(duration): + await asyncio.sleep(1) + await progress.increment() + await progress.set_message(f"Step {i + 1} of {duration}") + + return f"Completed in {duration} seconds" +``` + + +Background tasks require async functions. Sync functions will log a warning and execute immediately instead. + + +## Configuration + +Background tasks require explicit opt-in: + +| Environment Variable | Default | Description | +|---------------------|---------|-------------| +| `FASTMCP_ENABLE_TASKS` | `false` | Enable the MCP task protocol | +| `FASTMCP_ENABLE_DOCKET` | `false` | Enable the Docket task system | +| `FASTMCP_DOCKET_URL` | `memory://` | Backend URL (`memory://` or `redis://host:port/db`) | + +Both `ENABLE_TASKS` and `ENABLE_DOCKET` must be `true` for background tasks to work. + +You can also set a server-wide default in the constructor: + +```python +mcp = FastMCP("MyServer", tasks=True) +``` + +## Progress Reporting + +The `Progress` dependency lets you report progress back to clients: + +```python +from fastmcp.dependencies import Progress + +@mcp.tool(task=True) +async def process_files(files: list[str], progress: Progress = Progress()) -> str: + await progress.set_total(len(files)) + + for file in files: + await progress.set_message(f"Processing {file}") + # ... do work ... 
+ await progress.increment() + + return f"Processed {len(files)} files" +``` + +The progress API: +- `await progress.set_total(n)` - Set the total number of steps +- `await progress.increment(amount=1)` - Increment progress +- `await progress.set_message(text)` - Update the status message + +Progress works in both immediate and background execution modes. + +## Additional Dependencies + +FastMCP provides several Docket-style dependencies you can inject into your functions: + +```python +from fastmcp.dependencies import Progress, CurrentDocket, CurrentWorker + +@mcp.tool(task=True) +async def my_task( + progress: Progress = Progress(), + # docket: Docket = CurrentDocket(), # Access the Docket instance + # worker: Worker = CurrentWorker(), # Access worker info +) -> str: + ... +``` + +By injecting `CurrentDocket()`, you gain access to the full Docket API. This lets you schedule additional background tasks from within your tool, chain tasks together, or use any of Docket's advanced features like task priorities and retries. See the [Docket documentation](https://chrisguidry.github.io/docket/) for the complete API. + +## Running Additional Workers + +For distributed task processing, start additional workers: + +```bash +fastmcp tasks worker server.py +``` + +Configure worker concurrency via environment: + +```bash +export FASTMCP_DOCKET_CONCURRENCY=20 +fastmcp tasks worker server.py +``` + + +Workers only work with Redis/Valkey backends. The `memory://` backend is single-process only. 
+ diff --git a/examples/tasks/.envrc b/examples/tasks/.envrc new file mode 100644 index 0000000000..08830292b8 --- /dev/null +++ b/examples/tasks/.envrc @@ -0,0 +1,16 @@ +# FastMCP Tasks Example Environment Configuration +# This file is loaded by direnv (https://direnv.net/) when you cd into this directory +# Run `direnv allow` to enable automatic environment loading + +# Enable Docket support for background task execution +export FASTMCP_ENABLE_DOCKET=true + +# Enable MCP SEP-1686 task protocol support +export FASTMCP_ENABLE_TASKS=true + +# Configure Docket backend URL +# Use Redis backend (requires docker-compose up) +export FASTMCP_DOCKET_URL=redis://localhost:24242/0 + +# Or uncomment to use memory:// for single-process testing +# export FASTMCP_DOCKET_URL=memory:// diff --git a/examples/tasks/README.md b/examples/tasks/README.md new file mode 100644 index 0000000000..ed0cd5be0d --- /dev/null +++ b/examples/tasks/README.md @@ -0,0 +1,62 @@ +# FastMCP Tasks Example + +Demonstrates background task execution with Docket, including progress tracking, distributed backends, and CLI worker management. + +## Setup + +```bash +# From the fastmcp root directory +uv sync + +# Start Redis +cd examples/tasks +docker compose up -d + +# Load environment (or source .envrc manually) +direnv allow + +# Run the server +fastmcp run server.py +``` + +For single-process mode without Redis, set `FASTMCP_DOCKET_URL=memory://` (note: CLI workers won't work). 
+
+## Running the Client
+
+```bash
+# Background execution with progress callbacks
+python examples/tasks/client.py --duration 10
+
+# Immediate execution (blocks)
+python examples/tasks/client.py immediate --duration 5
+```
+
+## Starting Additional Workers
+
+With Redis, you can run additional workers to process tasks in parallel:
+
+```bash
+fastmcp tasks worker server.py
+
+# Configure via environment:
+export FASTMCP_DOCKET_CONCURRENCY=20
+fastmcp tasks worker server.py
+```
+
+**Backend options:**
+- `memory://` - Single-process only (default)
+- `redis://` - Distributed, multi-process (Redis or Valkey)
+
+## Environment Variables
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `FASTMCP_ENABLE_DOCKET` | `false` | Enable Docket task system |
+| `FASTMCP_ENABLE_TASKS` | `false` | Enable MCP task protocol (SEP-1686) |
+| `FASTMCP_DOCKET_URL` | `memory://` | Docket backend URL |
+
+## Learn More
+
+- [FastMCP Tasks Documentation](https://gofastmcp.com/docs/tasks)
+- [Docket Documentation](https://github.com/chrisguidry/docket)
+- [MCP Task Protocol (SEP-1686)](https://modelcontextprotocol.io/specification/2025-11-25/basic/utilities/tasks)
diff --git a/examples/tasks/client.py b/examples/tasks/client.py
new file mode 100644
index 0000000000..f72814889b
--- /dev/null
+++ b/examples/tasks/client.py
@@ -0,0 +1,160 @@
+"""
+FastMCP Tasks Example Client
+
+Demonstrates calling tools both immediately and as background tasks,
+with real-time progress updates via status callbacks.
+ +Usage: + # Make sure environment is configured (source .envrc or use direnv) + source .envrc + + # Background task execution with progress callbacks (default) + python client.py --duration 10 + + # Immediate execution (blocks until complete) + python client.py immediate --duration 5 +""" + +import asyncio +import sys +from pathlib import Path +from typing import Annotated + +import cyclopts +from mcp.types import GetTaskResult, TextContent +from rich.console import Console + +from fastmcp.client import Client + +console = Console() +app = cyclopts.App(name="tasks-client", help="FastMCP Tasks Example Client") + + +def load_server(): + """Load the example server.""" + examples_dir = Path(__file__).parent.parent.parent + if str(examples_dir) not in sys.path: + sys.path.insert(0, str(examples_dir)) + + import examples.tasks.server as server_module + + return server_module.mcp + + +# Track last message to deduplicate consecutive identical notifications +# Note: Docket fires separate events for progress.increment() and progress.set_message(), +# but MCP's statusMessage field only carries the text message (no numerical progress). +# This means we often get duplicate notifications with identical messages. +_last_notification_message = None + + +def print_notification(status: GetTaskResult) -> None: + """Callback function for push notifications from server. + + This is called automatically when the server sends notifications/tasks/status. + Deduplicates identical consecutive messages to keep output clean. 
+ """ + global _last_notification_message + + # Skip if this is the same message we just printed + if status.statusMessage == _last_notification_message: + return + + _last_notification_message = status.statusMessage + + color = { + "working": "yellow", + "completed": "green", + "failed": "red", + }.get(status.status, "yellow") + + icon = { + "working": "🚀", + "completed": "✅", + "failed": "❌", + }.get(status.status, "⚠️") + + console.print( + f"[{color}]📢 Notification: {status.status} {icon} - {status.statusMessage}[/{color}]" + ) + + +@app.default +async def task( + duration: Annotated[ + int, + cyclopts.Parameter(help="Duration of computation in seconds (1-60)"), + ] = 10, +): + """Execute as background task with real-time progress callbacks.""" + if duration < 1 or duration > 60: + console.print("[red]Error: Duration must be between 1 and 60 seconds[/red]") + sys.exit(1) + + server = load_server() + + console.print(f"\n[bold]Calling slow_computation(duration={duration})[/bold]") + console.print("Mode: [cyan]Background task[/cyan]\n") + + async with Client(server) as client: + task_obj = await client.call_tool( + "slow_computation", + arguments={"duration": duration}, + task=True, + ) + + console.print(f"Task started: [cyan]{task_obj.task_id}[/cyan]\n") + + # Register callback for real-time push notifications + task_obj.on_status_change(print_notification) + + console.print( + "[dim]Notifications will appear as the server sends them...[/dim]\n" + ) + + # Do other work while task runs in background + for i in range(3): + await asyncio.sleep(0.5) + console.print(f"[dim]Client doing other work... 
({i + 1}/3)[/dim]") + + console.print() + + # Wait for task to complete + console.print("[dim]Waiting for final result...[/dim]") + result = await task_obj.result() + + console.print("\n[bold]Result:[/bold]") + assert isinstance(result.content[0], TextContent) + console.print(f" {result.content[0].text}") + + +@app.command +async def immediate( + duration: Annotated[ + int, + cyclopts.Parameter(help="Duration of computation in seconds (1-60)"), + ] = 5, +): + """Execute the tool immediately (blocks until complete).""" + if duration < 1 or duration > 60: + console.print("[red]Error: Duration must be between 1 and 60 seconds[/red]") + sys.exit(1) + + server = load_server() + + console.print(f"\n[bold]Calling slow_computation(duration={duration})[/bold]") + console.print("Mode: [cyan]Immediate execution[/cyan]\n") + + async with Client(server) as client: + result = await client.call_tool( + "slow_computation", + arguments={"duration": duration}, + ) + + console.print("\n[bold]Result:[/bold]") + assert isinstance(result.content[0], TextContent) + console.print(f" {result.content[0].text}") + + +if __name__ == "__main__": + app() diff --git a/examples/tasks/docker-compose.yml b/examples/tasks/docker-compose.yml new file mode 100644 index 0000000000..d5643b2b38 --- /dev/null +++ b/examples/tasks/docker-compose.yml @@ -0,0 +1,10 @@ +services: + redis: + image: redis:7-alpine + ports: + - "24242:6379" + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 5 diff --git a/examples/tasks/server.py b/examples/tasks/server.py new file mode 100644 index 0000000000..77b3cde82a --- /dev/null +++ b/examples/tasks/server.py @@ -0,0 +1,75 @@ +""" +FastMCP Tasks Example Server + +Demonstrates background task execution with progress tracking using Docket. + +Setup: + 1. Start Redis: docker compose up -d + 2. Load environment: source .envrc + 3. 
Run server: fastmcp run server.py + +The example uses Redis by default to demonstrate distributed task execution +and the fastmcp tasks CLI commands. +""" + +import asyncio +import logging +from typing import Annotated + +from docket import Logged + +from fastmcp import FastMCP +from fastmcp.dependencies import Progress + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Create server +mcp = FastMCP("Tasks Example") + + +@mcp.tool(task=True) +async def slow_computation( + duration: Annotated[int, Logged], + progress: Progress = Progress(), +) -> str: + """ + Perform a slow computation that takes `duration` seconds. + + This tool demonstrates progress tracking with background tasks. + It logs progress every 1-2 seconds and reports progress via Docket. + + Args: + duration: Number of seconds the computation should take (1-60) + + Returns: + A completion message with the total duration + """ + if duration < 1 or duration > 60: + raise ValueError("Duration must be between 1 and 60 seconds") + + logger.info(f"Starting slow computation for {duration} seconds") + + # Set total progress units + await progress.set_total(duration) + + # Process each second + for i in range(duration): + # Sleep for 1 second + await asyncio.sleep(1) + + # Update progress + elapsed = i + 1 + remaining = duration - elapsed + await progress.increment() + await progress.set_message( + f"Working... {elapsed}/{duration}s ({remaining}s remaining)" + ) + + # Log every 1-2 seconds + if elapsed % 2 == 0 or elapsed == duration: + logger.info(f"Progress: {elapsed}/{duration}s") + + logger.info(f"Completed computation in {duration} seconds") + return f"Computation completed successfully in {duration} seconds!" 
diff --git a/pyproject.toml b/pyproject.toml index 93b5043de2..af7001bd00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,12 +10,13 @@ dependencies = [ "mcp>=1.23.1", "openapi-pydantic>=0.5.1", "platformdirs>=4.0.0", + "pydocket>=0.14.0", "rich>=13.9.4", "cyclopts>=4.0.0", "authlib>=1.6.5", "pydantic[email]>=2.11.7", "pyperclip>=1.9.0", - "py-key-value-aio[disk,memory]>=0.2.8,<0.4.0", + "py-key-value-aio[disk,keyring,memory]>=0.3.0,<0.4.0", "uvicorn>=0.35", "websockets>=15.0.1", "jsonschema-path>=0.3.4", diff --git a/src/fastmcp/cli/cli.py b/src/fastmcp/cli/cli.py index 3ed5a3c01e..6904c036af 100644 --- a/src/fastmcp/cli/cli.py +++ b/src/fastmcp/cli/cli.py @@ -19,6 +19,7 @@ import fastmcp from fastmcp.cli import run as run_module from fastmcp.cli.install import install_app +from fastmcp.cli.tasks import tasks_app from fastmcp.utilities.cli import is_already_in_uv_subprocess, load_and_merge_config from fastmcp.utilities.inspect import ( InspectFormat, @@ -853,6 +854,9 @@ async def prepare( # Add install subcommands using proper Cyclopts pattern app.command(install_app) +# Add tasks subcommand group +app.command(tasks_app) + if __name__ == "__main__": app() diff --git a/src/fastmcp/cli/tasks.py b/src/fastmcp/cli/tasks.py new file mode 100644 index 0000000000..b5eceb4e90 --- /dev/null +++ b/src/fastmcp/cli/tasks.py @@ -0,0 +1,118 @@ +"""FastMCP tasks CLI for Docket task management.""" + +import asyncio +import sys +from typing import Annotated + +import cyclopts +from rich.console import Console + +from fastmcp.utilities.cli import load_and_merge_config +from fastmcp.utilities.logging import get_logger + +logger = get_logger("cli.tasks") +console = Console() + +tasks_app = cyclopts.App( + name="tasks", + help="Manage FastMCP background tasks using Docket", +) + + +def check_docket_enabled() -> None: + """Check if Docket is enabled with a distributed backend. 
+ + Raises: + SystemExit: If Docket isn't enabled or using memory:// URL + """ + import fastmcp + + # Check if Docket is enabled + if not fastmcp.settings.enable_docket: + console.print( + "[bold red]✗ Docket not enabled[/bold red]\n\n" + "Docket task support is not enabled.\n\n" + "To enable Docket, set the environment variable:\n" + " [cyan]export FASTMCP_ENABLE_DOCKET=true[/cyan]\n\n" + "Then try again." + ) + sys.exit(1) + + docket_url = fastmcp.settings.docket.url + + # Check for memory:// URL and provide helpful error + if docket_url.startswith("memory://"): + console.print( + "[bold red]✗ In-memory backend not supported by CLI[/bold red]\n\n" + "Your Docket configuration uses an in-memory backend (memory://) which\n" + "only works within a single process.\n\n" + "To use [cyan]fastmcp tasks[/cyan] CLI commands (which run in separate\n" + "processes), you need a distributed backend:\n\n" + "[bold]1. Install Redis or Valkey:[/bold]\n" + " [dim]macOS:[/dim] brew install redis\n" + " [dim]Ubuntu:[/dim] apt install redis-server\n" + " [dim]Valkey:[/dim] See https://valkey.io/\n\n" + "[bold]2. Start the service:[/bold]\n" + " redis-server\n\n" + "[bold]3. Configure Docket URL:[/bold]\n" + " [dim]Environment variable:[/dim]\n" + " export FASTMCP_DOCKET_URL=redis://localhost:6379/0\n\n" + "[bold]4. Try again[/bold]\n\n" + "The memory backend works great for single-process servers, but the CLI\n" + "commands need a distributed backend to coordinate across processes.\n\n" + "Need help? See: [cyan]https://gofastmcp.com/docs/tasks[/cyan]" + ) + sys.exit(1) + + +@tasks_app.command +def worker( + server_spec: Annotated[ + str | None, + cyclopts.Parameter( + help="Python file to run, optionally with :object suffix, or None to auto-detect fastmcp.json" + ), + ] = None, +) -> None: + """Start an additional worker to process background tasks. + + Connects to your Docket backend and processes tasks in parallel with + any other running workers. 
Configure via environment variables + (FASTMCP_DOCKET_*). + + Example: + fastmcp tasks worker server.py + fastmcp tasks worker examples/tasks/server.py + """ + import fastmcp + + check_docket_enabled() + + # Load server to get task functions + try: + config, resolved_spec = load_and_merge_config(server_spec) + except FileNotFoundError: + sys.exit(1) + + # Load the server + server = asyncio.run(config.source.load_server()) + + async def run_worker(): + """Enter server lifespan and camp forever.""" + async with server._lifespan_manager(): + console.print( + f"[bold green]✓[/bold green] Starting worker for [cyan]{server.name}[/cyan]" + ) + console.print(f" Docket: {fastmcp.settings.docket.name}") + console.print(f" Backend: {fastmcp.settings.docket.url}") + console.print(f" Concurrency: {fastmcp.settings.docket.concurrency}") + + # Server's lifespan has started its worker - just camp here forever + while True: + await asyncio.sleep(3600) + + try: + asyncio.run(run_worker()) + except KeyboardInterrupt: + console.print("\n[yellow]Worker stopped[/yellow]") + sys.exit(0) diff --git a/src/fastmcp/client/client.py b/src/fastmcp/client/client.py index c48b91e5db..3842cd4f43 100644 --- a/src/fastmcp/client/client.py +++ b/src/fastmcp/client/client.py @@ -4,6 +4,8 @@ import copy import datetime import secrets +import uuid +import weakref from contextlib import AsyncExitStack, asynccontextmanager from dataclasses import dataclass, field from pathlib import Path @@ -15,6 +17,19 @@ import pydantic_core from exceptiongroup import catch from mcp import ClientSession +from mcp.types import ( + CancelTaskRequest, + CancelTaskRequestParams, + GetTaskPayloadRequest, + GetTaskPayloadRequestParams, + GetTaskPayloadResult, + GetTaskRequest, + GetTaskRequestParams, + GetTaskResult, + ListTasksRequest, + PaginatedRequestParams, + TaskStatusNotification, +) from pydantic import AnyUrl import fastmcp @@ -36,6 +51,13 @@ SamplingHandler, create_sampling_callback, ) +from fastmcp.client.tasks 
import ( + PromptTask, + ResourceTask, + TaskNotificationHandler, + ToolTask, + _task_capable_initialize, +) from fastmcp.exceptions import ToolError from fastmcp.mcp_config import MCPConfig from fastmcp.server import FastMCP @@ -77,16 +99,6 @@ T = TypeVar("T", bound="ClientTransport") -def _timeout_to_seconds( - timeout: datetime.timedelta | float | int | None, -) -> float | None: - if timeout is None: - return None - if isinstance(timeout, datetime.timedelta): - return timeout.total_seconds() - return float(timeout) - - @dataclass class ClientSessionState: """Holds all session-related state for a Client instance. @@ -104,6 +116,17 @@ class ClientSessionState: initialize_result: mcp.types.InitializeResult | None = None +@dataclass +class CallToolResult: + """Parsed result from a tool call.""" + + content: list[mcp.types.ContentBlock] + structured_content: dict[str, Any] | None + meta: dict[str, Any] | None + data: Any = None + is_error: bool = False + + class Client(Generic[ClientTransportT]): """ MCP client that delegates connection management to a Transport instance. 
@@ -258,7 +281,13 @@ def __init__( # handle init handshake timeout if init_timeout is None: init_timeout = fastmcp.settings.client_init_timeout - self._init_timeout = _timeout_to_seconds(init_timeout) + if isinstance(init_timeout, datetime.timedelta): + init_timeout = init_timeout.total_seconds() + elif not init_timeout: + init_timeout = None + else: + init_timeout = float(init_timeout) + self._init_timeout = init_timeout self.auto_initialize = auto_initialize @@ -266,7 +295,7 @@ def __init__( "sampling_callback": None, "list_roots_callback": None, "logging_callback": create_log_callback(log_handler), - "message_handler": message_handler, + "message_handler": message_handler or TaskNotificationHandler(self), "read_timeout_seconds": timeout, # ty: ignore[invalid-argument-type] "client_info": client_info, } @@ -287,6 +316,15 @@ def __init__( # Session context management - see class docstring for detailed explanation self._session_state = ClientSessionState() + # Track task IDs submitted by this client (for list_tasks support) + self._submitted_task_ids: set[str] = set() + + # Registry for routing notifications/tasks/status to Task objects + + self._task_registry: dict[ + str, weakref.ref[ToolTask | PromptTask | ResourceTask] + ] = {} + @property def session(self) -> ClientSession: """Get the current active session. Raises RuntimeError if not connected.""" @@ -359,7 +397,7 @@ async def _context_manager(self): **self._session_kwargs ) as session: self._session_state.session = session - # Initialize the session + # Initialize the session if auto_initialize is enabled try: if self.auto_initialize: await self.initialize() @@ -370,6 +408,69 @@ async def _context_manager(self): self._session_state.session = None self._session_state.initialize_result = None + async def initialize( + self, + timeout: datetime.timedelta | float | int | None = None, + ) -> mcp.types.InitializeResult: + """Send an initialize request to the server. 
+ + This method performs the MCP initialization handshake with the server, + exchanging capabilities and server information. It is idempotent - calling + it multiple times returns the cached result from the first call. + + The initialization happens automatically when entering the client context + manager unless `auto_initialize=False` was set during client construction. + Manual calls to this method are only needed when auto-initialization is disabled. + + Args: + timeout: Optional timeout for the initialization request (seconds or timedelta). + If None, uses the client's init_timeout setting. + + Returns: + InitializeResult: The server's initialization response containing server info, + capabilities, protocol version, and optional instructions. + + Raises: + RuntimeError: If the client is not connected or initialization times out. + + Example: + ```python + # With auto-initialization disabled + client = Client(server, auto_initialize=False) + async with client: + result = await client.initialize() + print(f"Server: {result.serverInfo.name}") + print(f"Instructions: {result.instructions}") + ``` + """ + + if self.initialize_result is not None: + return self.initialize_result + + if timeout is None: + timeout = self._init_timeout + + # Convert timeout if needed + if isinstance(timeout, datetime.timedelta): + timeout = timeout.total_seconds() + elif timeout is not None: + timeout = float(timeout) + + try: + with anyio.fail_after(timeout): + if fastmcp.settings.enable_tasks: + self._session_state.initialize_result = ( + await _task_capable_initialize(self.session) + ) + else: + self._session_state.initialize_result = ( + await self.session.initialize() + ) + + return self._session_state.initialize_result + except TimeoutError as e: + raise RuntimeError("Failed to initialize server session") from e + async def __aenter__(self): return await self._connect() @@ -491,61 +592,34 @@ async def _session_runner(self): # Ensure ready event is set even if context manager entry 
fails self._session_state.ready_event.set() + def _handle_task_status_notification( + self, notification: TaskStatusNotification + ) -> None: + """Route task status notification to appropriate Task object. + + Called when notifications/tasks/status is received from server. + Updates Task object's cache and triggers events/callbacks. + """ + # Extract task ID from notification params + task_id = notification.params.taskId + if not task_id: + return + + # Look up task in registry (weakref) + task_ref = self._task_registry.get(task_id) + if task_ref: + task = task_ref() # Dereference weakref + if task: + # Convert notification params to GetTaskResult (they share the same fields via Task) + status = GetTaskResult.model_validate(notification.params.model_dump()) + task._handle_status_notification(status) + async def close(self): await self._disconnect(force=True) await self.transport.close() # --- MCP Client Methods --- - async def initialize( - self, - timeout: datetime.timedelta | float | int | None = None, - ) -> mcp.types.InitializeResult: - """Send an initialize request to the server. - - This method performs the MCP initialization handshake with the server, - exchanging capabilities and server information. It is idempotent - calling - it multiple times returns the cached result from the first call. - - The initialization happens automatically when entering the client context - manager unless `auto_initialize=False` was set during client construction. - Manual calls to this method are only needed when auto-initialization is disabled. - - Args: - timeout: Optional timeout for the initialization request (seconds or timedelta). - If None, uses the client's init_timeout setting. - - Returns: - InitializeResult: The server's initialization response containing server info, - capabilities, protocol version, and optional instructions. - - Raises: - RuntimeError: If the client is not connected or initialization times out. 
- - Example: - ```python - # With auto-initialization disabled - client = Client(server, auto_initialize=False) - async with client: - result = await client.initialize() - print(f"Server: {result.serverInfo.name}") - print(f"Instructions: {result.instructions}") - ``` - """ - - if self.initialize_result is not None: - return self.initialize_result - - if timeout is None: - timeout = self._init_timeout - try: - with anyio.fail_after(_timeout_to_seconds(timeout)): - initialize_result = await self.session.initialize() - self._session_state.initialize_result = initialize_result - return initialize_result - except TimeoutError as e: - raise RuntimeError("Failed to initialize server session") from e - async def ping(self) -> bool: """Send a ping request.""" result = await self.session.send_ping() @@ -649,12 +723,13 @@ async def list_resource_templates( return result.resourceTemplates async def read_resource_mcp( - self, uri: AnyUrl | str + self, uri: AnyUrl | str, meta: dict[str, Any] | None = None ) -> mcp.types.ReadResourceResult: """Send a resources/read request and return the complete MCP protocol result. Args: uri (AnyUrl | str): The URI of the resource to read. Can be a string or an AnyUrl object. + meta (dict[str, Any] | None, optional): Request metadata (e.g., for SEP-1686 tasks). Defaults to None. 
Returns: mcp.types.ReadResourceResult: The complete response object from the protocol, @@ -667,24 +742,73 @@ async def read_resource_mcp( if isinstance(uri, str): uri = AnyUrl(uri) # Ensure AnyUrl - result = await self.session.read_resource(uri) + + # If meta provided, use send_request for SEP-1686 task support + if meta: + task_dict = meta.get("modelcontextprotocol.io/task") + request = mcp.types.ReadResourceRequest( + params=mcp.types.ReadResourceRequestParams( + uri=uri, + task=mcp.types.TaskMetadata(**task_dict) + if task_dict + else None, # SEP-1686: task as direct param (spec-compliant) + ) + ) + result = await self.session.send_request( + request=request, # type: ignore[arg-type] + result_type=mcp.types.ReadResourceResult, + ) + else: + result = await self.session.read_resource(uri) return result + @overload + async def read_resource( + self, + uri: AnyUrl | str, + *, + task: Literal[False] = False, + ) -> list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents]: ... + + @overload async def read_resource( - self, uri: AnyUrl | str - ) -> list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents]: + self, + uri: AnyUrl | str, + *, + task: Literal[True], + task_id: str | None = None, + ttl: int = 60000, + ) -> ResourceTask: ... + + async def read_resource( + self, + uri: AnyUrl | str, + *, + task: bool = False, + task_id: str | None = None, + ttl: int = 60000, + ) -> ( + list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents] + | ResourceTask + ): """Read the contents of a resource or resolved template. Args: uri (AnyUrl | str): The URI of the resource to read. Can be a string or an AnyUrl object. + task (bool): If True, execute as background task (SEP-1686). Defaults to False. + task_id (str | None): Optional client-provided task ID (auto-generated if not provided). + ttl (int): Time to keep results available in milliseconds (default 60s). 
Returns: - list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents]: A list of content - objects, typically containing either text or binary data. + list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents] | ResourceTask: + A list of content objects if task=False, or a ResourceTask object if task=True. Raises: RuntimeError: If called while the client is not connected. """ + if task: + return await self._read_resource_as_task(uri, task_id, ttl) + if isinstance(uri, str): try: uri = AnyUrl(uri) # Ensure AnyUrl @@ -695,6 +819,59 @@ async def read_resource( result = await self.read_resource_mcp(uri) return result.contents + async def _read_resource_as_task( + self, + uri: AnyUrl | str, + task_id: str | None = None, + ttl: int = 60000, + ) -> ResourceTask: + """Read a resource for background execution (SEP-1686). + + Returns a ResourceTask object that handles both background and immediate execution. + + Args: + uri: Resource URI to read + task_id: Optional client-provided task ID (ignored, for backward compatibility) + ttl: Time to keep results available in milliseconds (default 60s) + + Returns: + ResourceTask: Future-like object for accessing task status and results + """ + # Per SEP-1686 final spec: client sends only ttl, server generates taskId + # Read resource with task metadata (no taskId sent) + result = await self.read_resource_mcp( + uri=uri, + meta={ + "modelcontextprotocol.io/task": { + "ttl": ttl, + } + }, + ) + + # Check if server accepted background execution + if result.meta and "modelcontextprotocol.io/task" in result.meta: + # Background execution accepted - extract server-generated taskId + server_task_id = result.meta["modelcontextprotocol.io/task"]["taskId"] + # Track this task ID for list_tasks() + self._submitted_task_ids.add(server_task_id) + + # Create task object + task_obj = ResourceTask( + self, server_task_id, uri=str(uri), immediate_result=None + ) + + # Register for notification routing + 
self._task_registry[server_task_id] = weakref.ref(task_obj) # type: ignore[assignment] + + return task_obj + else: + # Server declined background execution (graceful degradation) + # Use a synthetic task ID for the immediate result + synthetic_task_id = task_id or str(uuid.uuid4()) + return ResourceTask( + self, synthetic_task_id, uri=str(uri), immediate_result=result.contents + ) + # async def subscribe_resource(self, uri: AnyUrl | str) -> None: # """Send a resources/subscribe request.""" # if isinstance(uri, str): @@ -738,13 +915,17 @@ async def list_prompts(self) -> list[mcp.types.Prompt]: # --- Prompt --- async def get_prompt_mcp( - self, name: str, arguments: dict[str, Any] | None = None + self, + name: str, + arguments: dict[str, Any] | None = None, + meta: dict[str, Any] | None = None, ) -> mcp.types.GetPromptResult: """Send a prompts/get request and return the complete MCP protocol result. Args: name (str): The name of the prompt to retrieve. arguments (dict[str, Any] | None, optional): Arguments to pass to the prompt. Defaults to None. + meta (dict[str, Any] | None, optional): Request metadata (e.g., for SEP-1686 tasks). Defaults to None. 
Returns: mcp.types.GetPromptResult: The complete response object from the protocol, @@ -768,30 +949,135 @@ async def get_prompt_mcp( "utf-8" ) - result = await self.session.get_prompt( - name=name, arguments=serialized_arguments - ) + # If meta provided, use send_request for SEP-1686 task support + if meta: + task_dict = meta.get("modelcontextprotocol.io/task") + request = mcp.types.GetPromptRequest( + params=mcp.types.GetPromptRequestParams( + name=name, + arguments=serialized_arguments, + task=mcp.types.TaskMetadata(**task_dict) + if task_dict + else None, # SEP-1686: task as direct param (spec-compliant) + ) + ) + result = await self.session.send_request( + request=request, # type: ignore[arg-type] + result_type=mcp.types.GetPromptResult, + ) + else: + result = await self.session.get_prompt( + name=name, arguments=serialized_arguments + ) return result + @overload async def get_prompt( - self, name: str, arguments: dict[str, Any] | None = None - ) -> mcp.types.GetPromptResult: + self, + name: str, + arguments: dict[str, Any] | None = None, + *, + task: Literal[False] = False, + ) -> mcp.types.GetPromptResult: ... + + @overload + async def get_prompt( + self, + name: str, + arguments: dict[str, Any] | None = None, + *, + task: Literal[True], + task_id: str | None = None, + ttl: int = 60000, + ) -> PromptTask: ... + + async def get_prompt( + self, + name: str, + arguments: dict[str, Any] | None = None, + *, + task: bool = False, + task_id: str | None = None, + ttl: int = 60000, + ) -> mcp.types.GetPromptResult | PromptTask: """Retrieve a rendered prompt message list from the server. Args: name (str): The name of the prompt to retrieve. arguments (dict[str, Any] | None, optional): Arguments to pass to the prompt. Defaults to None. + task (bool): If True, execute as background task (SEP-1686). Defaults to False. + task_id (str | None): Optional client-provided task ID (auto-generated if not provided). 
+ ttl (int): Time to keep results available in milliseconds (default 60s). Returns: - mcp.types.GetPromptResult: The complete response object from the protocol, - containing the prompt messages and any additional metadata. + mcp.types.GetPromptResult | PromptTask: The complete response object if task=False, + or a PromptTask object if task=True. Raises: RuntimeError: If called while the client is not connected. """ + if task: + return await self._get_prompt_as_task(name, arguments, task_id, ttl) + result = await self.get_prompt_mcp(name=name, arguments=arguments) return result + async def _get_prompt_as_task( + self, + name: str, + arguments: dict[str, Any] | None = None, + task_id: str | None = None, + ttl: int = 60000, + ) -> PromptTask: + """Get a prompt for background execution (SEP-1686). + + Returns a PromptTask object that handles both background and immediate execution. + + Args: + name: Prompt name to get + arguments: Prompt arguments + task_id: Optional client-provided task ID (ignored, for backward compatibility) + ttl: Time to keep results available in milliseconds (default 60s) + + Returns: + PromptTask: Future-like object for accessing task status and results + """ + # Per SEP-1686 final spec: client sends only ttl, server generates taskId + # Call prompt with task metadata (no taskId sent) + result = await self.get_prompt_mcp( + name=name, + arguments=arguments or {}, + meta={ + "modelcontextprotocol.io/task": { + "ttl": ttl, + } + }, + ) + + # Check if server accepted background execution + if result.meta and "modelcontextprotocol.io/task" in result.meta: + # Background execution accepted - extract server-generated taskId + server_task_id = result.meta["modelcontextprotocol.io/task"]["taskId"] + # Track this task ID for list_tasks() + self._submitted_task_ids.add(server_task_id) + + # Create task object + task_obj = PromptTask( + self, server_task_id, prompt_name=name, immediate_result=None + ) + + # Register for notification routing + 
self._task_registry[server_task_id] = weakref.ref(task_obj) # type: ignore[assignment] + + return task_obj + else: + # Server declined background execution (graceful degradation) + # Use a synthetic task ID for the immediate result + synthetic_task_id = task_id or str(uuid.uuid4()) + return PromptTask( + self, synthetic_task_id, prompt_name=name, immediate_result=result + ) + # --- Completion --- async def complete_mcp( @@ -914,24 +1200,123 @@ async def call_tool_mcp( if isinstance(timeout, int | float): timeout = datetime.timedelta(seconds=float(timeout)) - result = await self.session.call_tool( - name=name, - arguments=arguments, - read_timeout_seconds=timeout, # ty: ignore[invalid-argument-type] - progress_callback=progress_handler or self._progress_handler, - meta=meta, - ) + # For task submissions, use send_request to bypass SDK validation + # Task acknowledgments don't have structured content, which would fail validation + if meta and "modelcontextprotocol.io/task" in meta: + task_dict = meta.get("modelcontextprotocol.io/task") + request = mcp.types.CallToolRequest( + params=mcp.types.CallToolRequestParams( + name=name, + arguments=arguments, + task=mcp.types.TaskMetadata(**task_dict) + if task_dict + else None, # SEP-1686: task as direct param (spec-compliant) + ) + ) + result = await self.session.send_request( + request=request, # type: ignore[arg-type] + result_type=mcp.types.CallToolResult, + request_read_timeout_seconds=timeout, # type: ignore[arg-type] + progress_callback=progress_handler or self._progress_handler, + ) + else: + result = await self.session.call_tool( + name=name, + arguments=arguments, + read_timeout_seconds=timeout, # ty: ignore[invalid-argument-type] + progress_callback=progress_handler or self._progress_handler, + meta=meta, + ) return result + async def _parse_call_tool_result( + self, name: str, result: mcp.types.CallToolResult, raise_on_error: bool = False + ) -> CallToolResult: + """Parse an mcp.types.CallToolResult into our 
CallToolResult dataclass. + + Args: + name: Tool name (for schema lookup) + result: Raw MCP protocol result + raise_on_error: Whether to raise ToolError on errors + + Returns: + CallToolResult: Parsed result with structured data + """ + data = None + if result.isError and raise_on_error: + msg = cast(mcp.types.TextContent, result.content[0]).text + raise ToolError(msg) + elif result.structuredContent: + try: + if name not in self.session._tool_output_schemas: + await self.session.list_tools() + if name in self.session._tool_output_schemas: + output_schema = self.session._tool_output_schemas.get(name) + if output_schema: + if output_schema.get("x-fastmcp-wrap-result"): + output_schema = output_schema.get("properties", {}).get( + "result" + ) + structured_content = result.structuredContent.get("result") + else: + structured_content = result.structuredContent + output_type = json_schema_to_type(output_schema) + type_adapter = get_cached_typeadapter(output_type) + data = type_adapter.validate_python(structured_content) + else: + data = result.structuredContent + except Exception as e: + logger.error(f"[{self.name}] Error parsing structured content: {e}") + + return CallToolResult( + content=result.content, + structured_content=result.structuredContent, + meta=result.meta, + data=data, + is_error=result.isError, + ) + + @overload async def call_tool( self, name: str, arguments: dict[str, Any] | None = None, + *, timeout: datetime.timedelta | float | int | None = None, progress_handler: ProgressHandler | None = None, raise_on_error: bool = True, meta: dict[str, Any] | None = None, - ) -> CallToolResult: + task: Literal[False] = False, + ) -> CallToolResult: ... 
+ + @overload + async def call_tool( + self, + name: str, + arguments: dict[str, Any] | None = None, + *, + timeout: datetime.timedelta | float | int | None = None, + progress_handler: ProgressHandler | None = None, + raise_on_error: bool = True, + meta: dict[str, Any] | None = None, + task: Literal[True], + task_id: str | None = None, + ttl: int = 60000, + ) -> ToolTask: ... + + async def call_tool( + self, + name: str, + arguments: dict[str, Any] | None = None, + *, + timeout: datetime.timedelta | float | int | None = None, + progress_handler: ProgressHandler | None = None, + raise_on_error: bool = True, + meta: dict[str, Any] | None = None, + task: bool = False, + task_id: str | None = None, + ttl: int = 60000, + ) -> CallToolResult | ToolTask: """Call a tool on the server. Unlike call_tool_mcp, this method raises a ToolError if the tool call results in an error. @@ -941,15 +1326,18 @@ async def call_tool( arguments (dict[str, Any] | None, optional): Arguments to pass to the tool. Defaults to None. timeout (datetime.timedelta | float | int | None, optional): The timeout for the tool call. Defaults to None. progress_handler (ProgressHandler | None, optional): The progress handler to use for the tool call. Defaults to None. - raise_on_error (bool, optional): Whether to raise a ToolError if the tool call results in an error. Defaults to True. + raise_on_error (bool, optional): Whether to raise an exception if the tool call results in an error. Defaults to True. meta (dict[str, Any] | None, optional): Additional metadata to include with the request. This is useful for passing contextual information (like user IDs, trace IDs, or preferences) that shouldn't be tool arguments but may influence server-side processing. The server can access this via `context.request_context.meta`. Defaults to None. + task (bool): If True, execute as background task (SEP-1686). Defaults to False. + task_id (str | None): Optional client-provided task ID (auto-generated if not provided). 
+ ttl (int): Time to keep results available in milliseconds (default 60s). Returns: - CallToolResult: - The content returned by the tool. If the tool returns structured + CallToolResult | ToolTask: The content returned by the tool if task=False, + or a ToolTask object if task=True. If the tool returns structured outputs, they are returned as a dataclass (if an output schema is available) or a dictionary; otherwise, a list of content blocks is returned. Note: to receive both structured and @@ -960,6 +1348,9 @@ async def call_tool( ToolError: If the tool call results in an error. RuntimeError: If called while the client is not connected. """ + if task: + return await self._call_tool_as_task(name, arguments, task_id, ttl) + result = await self.call_tool_mcp( name=name, arguments=arguments or {}, @@ -967,38 +1358,184 @@ async def call_tool( progress_handler=progress_handler, meta=meta, ) - data = None - if result.isError and raise_on_error: - msg = cast(mcp.types.TextContent, result.content[0]).text - raise ToolError(msg) - elif result.structuredContent: + return await self._parse_call_tool_result( + name, result, raise_on_error=raise_on_error + ) + + async def _call_tool_as_task( + self, + name: str, + arguments: dict[str, Any] | None = None, + task_id: str | None = None, + ttl: int = 60000, + ) -> ToolTask: + """Call a tool for background execution (SEP-1686). + + Returns a ToolTask object that handles both background and immediate execution. + If the server accepts background execution, ToolTask will poll for results. + If the server declines (graceful degradation), ToolTask wraps the immediate result. 
+ + Args: + name: Tool name to call + arguments: Tool arguments + task_id: Optional client-provided task ID (ignored, for backward compatibility) + ttl: Time to keep results available in milliseconds (default 60s) + + Returns: + ToolTask: Future-like object for accessing task status and results + """ + # Per SEP-1686 final spec: client sends only ttl, server generates taskId + # Call tool with task metadata (no taskId sent) + result = await self.call_tool_mcp( + name=name, + arguments=arguments or {}, + meta={ + "modelcontextprotocol.io/task": { + "ttl": ttl, + } + }, + ) + + # Check if server accepted background execution + # If response includes task metadata, server accepted background mode + if result.meta and "modelcontextprotocol.io/task" in result.meta: + # Background execution accepted - extract server-generated taskId + server_task_id = result.meta["modelcontextprotocol.io/task"]["taskId"] + # Track this task ID for list_tasks() + self._submitted_task_ids.add(server_task_id) + + # Create task object + task_obj = ToolTask( + self, server_task_id, tool_name=name, immediate_result=None + ) + + # Register for notification routing + self._task_registry[server_task_id] = weakref.ref(task_obj) # type: ignore[assignment] + + return task_obj + else: + # Server declined background execution (graceful degradation) + # Executed synchronously - wrap the immediate result + # Need to convert mcp.types.CallToolResult to our CallToolResult + parsed_result = await self._parse_call_tool_result(name, result) + # Use a synthetic task ID for the immediate result + synthetic_task_id = task_id or str(uuid.uuid4()) + return ToolTask( + self, synthetic_task_id, tool_name=name, immediate_result=parsed_result + ) + + async def get_task_status(self, task_id: str) -> GetTaskResult: + """Query the status of a background task. + + Sends a 'tasks/get' MCP protocol request over the existing transport. 
+ + Args: + task_id: The task ID returned from call_tool_as_task + + Returns: + GetTaskResult: Status information including taskId, status, pollInterval, etc. + + Raises: + RuntimeError: If client not connected + """ + request = GetTaskRequest(params=GetTaskRequestParams(taskId=task_id)) + return await self.session.send_request( + request=request, # type: ignore[arg-type] + result_type=GetTaskResult, # type: ignore[arg-type] + ) + + async def get_task_result(self, task_id: str) -> Any: + """Retrieve the raw result of a completed background task. + + Sends a 'tasks/result' MCP protocol request over the existing transport. + Returns the raw result - callers should parse it appropriately. + + Args: + task_id: The task ID returned from call_tool_as_task + + Returns: + Any: The raw result (could be tool, prompt, or resource result) + + Raises: + RuntimeError: If client not connected, task not found, or task failed + """ + request = GetTaskPayloadRequest( + params=GetTaskPayloadRequestParams(taskId=task_id) + ) + # Return raw result - Task classes handle type-specific parsing + result = await self.session.send_request( + request=request, # type: ignore[arg-type] + result_type=GetTaskPayloadResult, # type: ignore[arg-type] + ) + # Return as dict for compatibility with Task class parsing + return result.model_dump(exclude_none=True, by_alias=True) + + async def list_tasks( + self, + cursor: str | None = None, + limit: int = 50, + ) -> dict[str, Any]: + """List background tasks. + + Sends a 'tasks/list' MCP protocol request to the server. If the server + returns an empty list (indicating client-side tracking), falls back to + querying status for locally tracked task IDs. + + Args: + cursor: Optional pagination cursor + limit: Maximum number of tasks to return (default 50) + + Returns: + dict: Response with structure: + - tasks: List of task status dicts with taskId, status, etc. 
+ - nextCursor: Optional cursor for next page + + Raises: + RuntimeError: If client not connected + """ + # Send protocol request + params = PaginatedRequestParams(cursor=cursor, limit=limit) + request = ListTasksRequest(params=params) + server_response = await self.session.send_request( + request=request, # type: ignore[invalid-argument-type] + result_type=mcp.types.ListTasksResult, + ) + + # If server returned tasks, use those + if server_response.tasks: + return server_response.model_dump(by_alias=True) + + # Server returned empty - fall back to client-side tracking + tasks = [] + for task_id in list(self._submitted_task_ids)[:limit]: try: - if name not in self.session._tool_output_schemas: - await self.session.list_tools() - if name in self.session._tool_output_schemas: - output_schema = self.session._tool_output_schemas.get(name) - if output_schema: - if output_schema.get("x-fastmcp-wrap-result"): - output_schema = output_schema.get("properties", {}).get( - "result" - ) - structured_content = result.structuredContent.get("result") - else: - structured_content = result.structuredContent - output_type = json_schema_to_type(output_schema) - type_adapter = get_cached_typeadapter(output_type) - data = type_adapter.validate_python(structured_content) - else: - data = result.structuredContent - except Exception as e: - logger.error(f"[{self.name}] Error parsing structured content: {e}") + status = await self.get_task_status(task_id) + tasks.append(status.model_dump(by_alias=True)) + except Exception: + # Task may have expired or been deleted, skip it + continue - return CallToolResult( - content=result.content, - structured_content=result.structuredContent, - meta=result.meta, - data=data, - is_error=result.isError, + return {"tasks": tasks, "nextCursor": None} + + async def cancel_task(self, task_id: str) -> mcp.types.CancelTaskResult: + """Cancel a task, transitioning it to cancelled state. + + Sends a 'tasks/cancel' MCP protocol request. 
Task will halt execution + and transition to cancelled state. + + Args: + task_id: The task ID to cancel + + Returns: + CancelTaskResult: The task status showing cancelled state + + Raises: + RuntimeError: If task doesn't exist + """ + request = CancelTaskRequest(params=CancelTaskRequestParams(taskId=task_id)) + return await self.session.send_request( + request=request, # type: ignore[invalid-argument-type] + result_type=mcp.types.CancelTaskResult, ) @classmethod @@ -1008,12 +1545,3 @@ def generate_name(cls, name: str | None = None) -> str: return f"{class_name}-{secrets.token_hex(2)}" else: return f"{class_name}-{name}-{secrets.token_hex(2)}" - - -@dataclass -class CallToolResult: - content: list[mcp.types.ContentBlock] - structured_content: dict[str, Any] | None - meta: dict[str, Any] | None - data: Any = None - is_error: bool = False diff --git a/src/fastmcp/client/tasks.py b/src/fastmcp/client/tasks.py new file mode 100644 index 0000000000..1b3c6cb6c3 --- /dev/null +++ b/src/fastmcp/client/tasks.py @@ -0,0 +1,614 @@ +"""SEP-1686 client Task classes.""" + +from __future__ import annotations + +import abc +import asyncio +import inspect +import time +import weakref +from collections.abc import Awaitable, Callable +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Generic, TypeVar + +import mcp.types +from mcp import ClientSession +from mcp.client.session import ( + SUPPORTED_PROTOCOL_VERSIONS, + _default_elicitation_callback, + _default_list_roots_callback, + _default_sampling_callback, +) +from mcp.types import GetTaskResult, TaskStatusNotification + +from fastmcp.client.messages import Message, MessageHandler +from fastmcp.utilities.logging import get_logger + +logger = get_logger(__name__) + +if TYPE_CHECKING: + from fastmcp.client.client import CallToolResult, Client + + +# TODO(SEP-1686): Remove this function when the MCP SDK adds an +# `experimental_capabilities` parameter to ClientSession (the server side +# already has this via 
`create_initialization_options(experimental_capabilities={})`). +# The SDK currently hardcodes `experimental=None` in ClientSession.initialize(). +async def _task_capable_initialize( + session: ClientSession, +) -> mcp.types.InitializeResult: + """Initialize a session with task capabilities declared.""" + sampling = ( + mcp.types.SamplingCapability() + if session._sampling_callback != _default_sampling_callback + else None + ) + elicitation = ( + mcp.types.ElicitationCapability() + if session._elicitation_callback != _default_elicitation_callback + else None + ) + roots = ( + mcp.types.RootsCapability(listChanged=True) + if session._list_roots_callback != _default_list_roots_callback + else None + ) + + result = await session.send_request( + mcp.types.ClientRequest( + mcp.types.InitializeRequest( + params=mcp.types.InitializeRequestParams( + protocolVersion=mcp.types.LATEST_PROTOCOL_VERSION, + capabilities=mcp.types.ClientCapabilities( + sampling=sampling, + elicitation=elicitation, + experimental={"tasks": {}}, + roots=roots, + ), + clientInfo=session._client_info, + ), + ) + ), + mcp.types.InitializeResult, + ) + + if result.protocolVersion not in SUPPORTED_PROTOCOL_VERSIONS: + raise RuntimeError( + f"Unsupported protocol version from the server: {result.protocolVersion}" + ) + + session._server_capabilities = result.capabilities + + await session.send_notification( + mcp.types.ClientNotification(mcp.types.InitializedNotification()) + ) + + return result + + +class TaskNotificationHandler(MessageHandler): + """MessageHandler that routes task status notifications to Task objects.""" + + def __init__(self, client: Client): + super().__init__() + self._client_ref: weakref.ref[Client] = weakref.ref(client) + + async def dispatch(self, message: Message) -> None: + """Dispatch messages, including task status notifications.""" + if isinstance(message, mcp.types.ServerNotification): + if isinstance(message.root, TaskStatusNotification): + client = self._client_ref() + if 
client: + client._handle_task_status_notification(message.root) + + await super().dispatch(message) + + +TaskResultT = TypeVar("TaskResultT") + + +class Task(abc.ABC, Generic[TaskResultT]): + """ + Abstract base class for MCP background tasks (SEP-1686). + + Provides a uniform API whether the server accepts background execution + or executes synchronously (graceful degradation per SEP-1686). + + Subclasses: + - ToolTask: For tool calls (result type: CallToolResult) + - PromptTask: For prompts (future, result type: GetPromptResult) + - ResourceTask: For resources (future, result type: ReadResourceResult) + """ + + def __init__( + self, + client: Client, + task_id: str, + immediate_result: TaskResultT | None = None, + ): + """ + Create a Task wrapper. + + Args: + client: The FastMCP client + task_id: The task identifier + immediate_result: If server executed synchronously, the immediate result + """ + self._client = client + self._task_id = task_id + self._immediate_result = immediate_result + self._is_immediate = immediate_result is not None + + # Notification-based optimization (SEP-1686 notifications/tasks/status) + self._status_cache: GetTaskResult | None = None + self._status_event: asyncio.Event | None = None # Lazy init + self._status_callbacks: list[ + Callable[[GetTaskResult], None | Awaitable[None]] + ] = [] + self._cached_result: TaskResultT | None = None + + def _check_client_connected(self) -> None: + """Validate that client context is still active. + + Raises: + RuntimeError: If accessed outside client context (unless immediate) + """ + if self._is_immediate: + return # Already resolved, no client needed + + try: + _ = self._client.session + except RuntimeError as e: + raise RuntimeError( + "Cannot access task results outside client context. " + "Task futures must be used within 'async with client:' block." 
+ ) from e + + @property + def task_id(self) -> str: + """Get the task ID.""" + return self._task_id + + @property + def returned_immediately(self) -> bool: + """Check if server executed the task immediately. + + Returns: + True if server executed synchronously (graceful degradation or no task support) + False if server accepted background execution + """ + return self._is_immediate + + def _handle_status_notification(self, status: GetTaskResult) -> None: + """Process incoming notifications/tasks/status (internal). + + Called by Client when a notification is received for this task. + Updates cache, triggers events, and invokes user callbacks. + + Args: + status: Task status from notification + """ + # Update cache for next status() call + self._status_cache = status + + # Wake up any wait() calls + if self._status_event is not None: + self._status_event.set() + + # Invoke user callbacks + for callback in self._status_callbacks: + try: + result = callback(status) + if inspect.isawaitable(result): + # Fire and forget async callbacks + asyncio.create_task(result) # noqa: RUF006 + except Exception as e: + logger.warning(f"Task callback error: {e}", exc_info=True) + + def on_status_change( + self, + callback: Callable[[GetTaskResult], None | Awaitable[None]], + ) -> None: + """Register callback for status change notifications. + + The callback will be invoked when a notifications/tasks/status is received + for this task (optional server feature per SEP-1686 lines 436-444). + + Supports both sync and async callbacks (auto-detected). + + Args: + callback: Function to call with GetTaskResult when status changes. + Can return None (sync) or Awaitable[None] (async). + + Example: + >>> task = await client.call_tool("slow_operation", {}, task=True) + >>> + >>> def on_update(status: GetTaskResult): + ... 
print(f"Task {status.taskId} is now {status.status}") + >>> + >>> task.on_status_change(on_update) + >>> result = await task # Callback fires when status changes + """ + self._status_callbacks.append(callback) + + async def status(self) -> GetTaskResult: + """Get current task status. + + If server executed immediately, returns synthetic completed status. + Otherwise queries the server for current status. + """ + self._check_client_connected() + + if self._is_immediate: + # Return synthetic completed status + now = datetime.now(timezone.utc) + return GetTaskResult( + taskId=self._task_id, + status="completed", + createdAt=now, + lastUpdatedAt=now, + ttl=None, + pollInterval=1000, + ) + + # Return cached status if available (from notification) + if self._status_cache is not None: + cached = self._status_cache + # Don't clear cache - keep it for next call + return cached + + # Query server and cache the result + self._status_cache = await self._client.get_task_status(self._task_id) + return self._status_cache + + @abc.abstractmethod + async def result(self) -> TaskResultT: + """Wait for and return the task result. + + Must be implemented by subclasses to return the appropriate result type. + """ + ... + + async def wait( + self, *, state: str | None = None, timeout: float = 300.0 + ) -> GetTaskResult: + """Wait for task to reach a specific state or complete. + + Uses event-based waiting when notifications are available (fast), + with fallback to polling (reliable). Optimally wakes up immediately + on status changes when server sends notifications/tasks/status. + + Args: + state: Desired state ('submitted', 'working', 'completed', 'failed'). 
+ If None, waits for any terminal state (completed/failed) + timeout: Maximum time to wait in seconds + + Returns: + GetTaskResult: Final task status + + Raises: + TimeoutError: If desired state not reached within timeout + """ + self._check_client_connected() + + if self._is_immediate: + # Already done + return await self.status() + + # Initialize event for notification wake-ups + if self._status_event is None: + self._status_event = asyncio.Event() + + start = time.time() + terminal_states = {"completed", "failed", "cancelled"} + poll_interval = 0.5 # Fallback polling interval (500ms) + + while True: + # Check cached status first (updated by notifications) + if self._status_cache: + current = self._status_cache.status + if state is None: + if current in terminal_states: + return self._status_cache + elif current == state: + return self._status_cache + + # Check timeout + elapsed = time.time() - start + if elapsed >= timeout: + raise TimeoutError( + f"Task {self._task_id} did not reach {state or 'terminal state'} within {timeout}s" + ) + + remaining = timeout - elapsed + + # Wait for notification event OR poll timeout + try: + await asyncio.wait_for( + self._status_event.wait(), timeout=min(poll_interval, remaining) + ) + self._status_event.clear() + except asyncio.TimeoutError: + # Fallback: poll server (notification didn't arrive in time) + self._status_cache = await self._client.get_task_status(self._task_id) + + async def cancel(self) -> None: + """Cancel this task, transitioning it to cancelled state. + + Sends a tasks/cancel protocol request. The server will attempt to halt + execution and move the task to cancelled state. + + Note: If server executed immediately (graceful degradation), this is a no-op + as there's no server-side task to cancel. 
+ """ + if self._is_immediate: + # No server-side task to cancel + return + self._check_client_connected() + await self._client.cancel_task(self._task_id) + # Invalidate cache to force fresh status fetch + self._status_cache = None + + def __await__(self): + """Allow 'await task' to get result.""" + return self.result().__await__() + + +class ToolTask(Task["CallToolResult"]): + """ + Represents a tool call that may execute in background or immediately. + + Provides a uniform API whether the server accepts background execution + or executes synchronously (graceful degradation per SEP-1686). + + Usage: + task = await client.call_tool_as_task("analyze", args) + + # Check status + status = await task.status() + + # Wait for completion + await task.wait() + + # Get result (waits if needed) + result = await task.result() # Returns CallToolResult + + # Or just await the task directly + result = await task + """ + + def __init__( + self, + client: Client, + task_id: str, + tool_name: str, + immediate_result: CallToolResult | None = None, + ): + """ + Create a ToolTask wrapper. + + Args: + client: The FastMCP client + task_id: The task identifier + tool_name: Name of the tool being executed + immediate_result: If server executed synchronously, the immediate result + """ + super().__init__(client, task_id, immediate_result) + self._tool_name = tool_name + + async def result(self) -> CallToolResult: + """Wait for and return the tool result. + + If server executed immediately, returns the immediate result. + Otherwise waits for background task to complete and retrieves result. 
+ + Returns: + CallToolResult: The parsed tool result (same as call_tool returns) + """ + # Check cache first + if self._cached_result is not None: + return self._cached_result + + if self._is_immediate: + assert self._immediate_result is not None # Type narrowing + result = self._immediate_result + else: + # Check client connected + self._check_client_connected() + + # Wait for completion using event-based wait (respects notifications) + await self.wait() + + # Get the raw result (dict or CallToolResult) + raw_result = await self._client.get_task_result(self._task_id) + + # Convert to CallToolResult if needed and parse + if isinstance(raw_result, dict): + # Raw dict from get_task_result - parse as CallToolResult + mcp_result = mcp.types.CallToolResult.model_validate(raw_result) + result = await self._client._parse_call_tool_result( + self._tool_name, mcp_result, raise_on_error=True + ) + elif isinstance(raw_result, mcp.types.CallToolResult): + # Already a CallToolResult from MCP protocol - parse it + result = await self._client._parse_call_tool_result( + self._tool_name, raw_result, raise_on_error=True + ) + else: + # Legacy ToolResult format - convert to MCP type + if hasattr(raw_result, "content") and hasattr( + raw_result, "structured_content" + ): + mcp_result = mcp.types.CallToolResult( + content=raw_result.content, + structuredContent=raw_result.structured_content, # type: ignore[arg-type] + _meta=raw_result.meta, + ) + result = await self._client._parse_call_tool_result( + self._tool_name, mcp_result, raise_on_error=True + ) + else: + # Unknown type - just return it + result = raw_result # type: ignore[assignment] + + # Cache before returning + self._cached_result = result + return result + + +class PromptTask(Task[mcp.types.GetPromptResult]): + """ + Represents a prompt call that may execute in background or immediately. 
+ + Provides a uniform API whether the server accepts background execution + or executes synchronously (graceful degradation per SEP-1686). + + Usage: + task = await client.get_prompt_as_task("analyze", args) + result = await task # Returns GetPromptResult + """ + + def __init__( + self, + client: Client, + task_id: str, + prompt_name: str, + immediate_result: mcp.types.GetPromptResult | None = None, + ): + """ + Create a PromptTask wrapper. + + Args: + client: The FastMCP client + task_id: The task identifier + prompt_name: Name of the prompt being executed + immediate_result: If server executed synchronously, the immediate result + """ + super().__init__(client, task_id, immediate_result) + self._prompt_name = prompt_name + + async def result(self) -> mcp.types.GetPromptResult: + """Wait for and return the prompt result. + + If server executed immediately, returns the immediate result. + Otherwise waits for background task to complete and retrieves result. + + Returns: + GetPromptResult: The prompt result with messages and description + """ + # Check cache first + if self._cached_result is not None: + return self._cached_result + + if self._is_immediate: + assert self._immediate_result is not None + result = self._immediate_result + else: + # Check client connected + self._check_client_connected() + + # Wait for completion using event-based wait (respects notifications) + await self.wait() + + # Get the raw MCP result + mcp_result = await self._client.get_task_result(self._task_id) + + # Parse as GetPromptResult + result = mcp.types.GetPromptResult.model_validate(mcp_result) + + # Cache before returning + self._cached_result = result + return result + + +class ResourceTask( + Task[list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents]] +): + """ + Represents a resource read that may execute in background or immediately. 
+ + Provides a uniform API whether the server accepts background execution + or executes synchronously (graceful degradation per SEP-1686). + + Usage: + task = await client.read_resource_as_task("file://data.txt") + contents = await task # Returns list[ReadResourceContents] + """ + + def __init__( + self, + client: Client, + task_id: str, + uri: str, + immediate_result: list[ + mcp.types.TextResourceContents | mcp.types.BlobResourceContents + ] + | None = None, + ): + """ + Create a ResourceTask wrapper. + + Args: + client: The FastMCP client + task_id: The task identifier + uri: URI of the resource being read + immediate_result: If server executed synchronously, the immediate result + """ + super().__init__(client, task_id, immediate_result) + self._uri = uri + + async def result( + self, + ) -> list[mcp.types.TextResourceContents | mcp.types.BlobResourceContents]: + """Wait for and return the resource contents. + + If server executed immediately, returns the immediate result. + Otherwise waits for background task to complete and retrieves result. 
+ + Returns: + list[ReadResourceContents]: The resource contents + """ + # Check cache first + if self._cached_result is not None: + return self._cached_result + + if self._is_immediate: + assert self._immediate_result is not None + result = self._immediate_result + else: + # Check client connected + self._check_client_connected() + + # Wait for completion using event-based wait (respects notifications) + await self.wait() + + # Get the raw MCP result + mcp_result = await self._client.get_task_result(self._task_id) + + # Parse as ReadResourceResult or extract contents + if isinstance(mcp_result, mcp.types.ReadResourceResult): + # Already parsed by TasksResponse - extract contents + result = list(mcp_result.contents) + elif isinstance(mcp_result, dict) and "contents" in mcp_result: + # Dict format - parse each content item + parsed_contents = [] + for item in mcp_result["contents"]: + if isinstance(item, dict): + if "blob" in item: + parsed_contents.append( + mcp.types.BlobResourceContents.model_validate(item) + ) + else: + parsed_contents.append( + mcp.types.TextResourceContents.model_validate(item) + ) + else: + parsed_contents.append(item) + result = parsed_contents + else: + # Fallback - might be the list directly + result = mcp_result if isinstance(mcp_result, list) else [mcp_result] + + # Cache before returning + self._cached_result = result + return result diff --git a/src/fastmcp/client/transports.py b/src/fastmcp/client/transports.py index 81afc9c88b..0fb4be0c75 100644 --- a/src/fastmcp/client/transports.py +++ b/src/fastmcp/client/transports.py @@ -854,11 +854,25 @@ async def connect_session( anyio.create_task_group() as tg, _enter_server_lifespan(server=self.server), ): + # Build experimental capabilities + import fastmcp + + experimental_capabilities = {} + if fastmcp.settings.enable_tasks: + # Declare SEP-1686 task support (enable_tasks requires enable_docket via validator) + experimental_capabilities["tasks"] = { + "tools": True, + "prompts": True, + 
"resources": True, + } + tg.start_soon( lambda: self.server._mcp_server.run( server_read, server_write, - self.server._mcp_server.create_initialization_options(), + self.server._mcp_server.create_initialization_options( + experimental_capabilities=experimental_capabilities + ), raise_exceptions=self.raise_exceptions, ) ) diff --git a/src/fastmcp/dependencies.py b/src/fastmcp/dependencies.py new file mode 100644 index 0000000000..3cdc7d6b9c --- /dev/null +++ b/src/fastmcp/dependencies.py @@ -0,0 +1,25 @@ +"""Dependency injection exports for FastMCP. + +This module re-exports dependency injection symbols from Docket and FastMCP +to provide a clean, centralized import location for all dependency-related +functionality. +""" + +from docket import Depends + +from fastmcp.server.dependencies import ( + CurrentContext, + CurrentDocket, + CurrentFastMCP, + CurrentWorker, + Progress, +) + +__all__ = [ + "CurrentContext", + "CurrentDocket", + "CurrentFastMCP", + "CurrentWorker", + "Depends", + "Progress", +] diff --git a/src/fastmcp/prompts/prompt.py b/src/fastmcp/prompts/prompt.py index f8498237de..6da6ad6d91 100644 --- a/src/fastmcp/prompts/prompt.py +++ b/src/fastmcp/prompts/prompt.py @@ -5,7 +5,7 @@ import inspect import json from collections.abc import Awaitable, Callable, Sequence -from typing import Any +from typing import Annotated, Any import pydantic_core from mcp.types import ContentBlock, Icon, PromptMessage, Role, TextContent @@ -14,13 +14,12 @@ from pydantic import Field, TypeAdapter from fastmcp.exceptions import PromptError -from fastmcp.server.dependencies import get_context +from fastmcp.server.dependencies import get_context, without_injected_parameters from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.json_schema import compress_schema from fastmcp.utilities.logging import get_logger from fastmcp.utilities.types import ( FastMCPBaseModel, - find_kwarg_by_type, get_cached_typeadapter, ) @@ -67,6 +66,12 @@ class 
Prompt(FastMCPComponent): arguments: list[PromptArgument] | None = Field( default=None, description="Arguments that can be passed to the prompt" ) + task: Annotated[ + bool, + Field( + description="Whether this prompt supports background task execution (SEP-1686)" + ), + ] = False def enable(self) -> None: super().enable() @@ -121,6 +126,7 @@ def from_function( tags: set[str] | None = None, enabled: bool | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionPrompt: """Create a Prompt from a function. @@ -139,6 +145,7 @@ def from_function( tags=tags, enabled=enabled, meta=meta, + task=task, ) async def render( @@ -169,6 +176,7 @@ def from_function( tags: set[str] | None = None, enabled: bool | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionPrompt: """Create a Prompt from a function. @@ -178,7 +186,6 @@ def from_function( - A dict (converted to a message) - A sequence of any of the above """ - from fastmcp.server.context import Context func_name = name or getattr(fn, "__name__", None) or fn.__class__.__name__ @@ -201,15 +208,11 @@ def from_function( if isinstance(fn, staticmethod): fn = fn.__func__ - type_adapter = get_cached_typeadapter(fn) + # Wrap fn to handle dependency resolution internally + wrapped_fn = without_injected_parameters(fn) + type_adapter = get_cached_typeadapter(wrapped_fn) parameters = type_adapter.json_schema() - - # Auto-detect context parameter if not provided - - context_kwarg = find_kwarg_by_type(fn, kwarg_type=Context) - prune_params = [context_kwarg] if context_kwarg else None - - parameters = compress_schema(parameters, prune_params=prune_params) + parameters = compress_schema(parameters, prune_titles=True) # Convert parameters to PromptArguments arguments: list[PromptArgument] = [] @@ -224,7 +227,6 @@ def from_function( if ( sig_param.annotation != inspect.Parameter.empty and sig_param.annotation is not str - and param_name != context_kwarg ): # Get the JSON 
schema for this specific parameter type try: @@ -260,29 +262,23 @@ def from_function( arguments=arguments, tags=tags or set(), enabled=enabled if enabled is not None else True, - fn=fn, + fn=wrapped_fn, meta=meta, + task=task if task is not None else False, ) def _convert_string_arguments(self, kwargs: dict[str, Any]) -> dict[str, Any]: """Convert string arguments to expected types based on function signature.""" - from fastmcp.server.context import Context + from fastmcp.server.dependencies import without_injected_parameters - sig = inspect.signature(self.fn) + wrapper_fn = without_injected_parameters(self.fn) + sig = inspect.signature(wrapper_fn) converted_kwargs = {} - # Find context parameter name if any - context_param_name = find_kwarg_by_type(self.fn, kwarg_type=Context) - for param_name, param_value in kwargs.items(): if param_name in sig.parameters: param = sig.parameters[param_name] - # Skip Context parameters - they're handled separately - if param_name == context_param_name: - converted_kwargs[param_name] = param_value - continue - # If parameter has no annotation or annotation is str, pass as-is if ( param.annotation == inspect.Parameter.empty @@ -320,8 +316,6 @@ async def render( arguments: dict[str, Any] | None = None, ) -> list[PromptMessage]: """Render the prompt with arguments.""" - from fastmcp.server.context import Context - # Validate required arguments if self.arguments: required = {arg.name for arg in self.arguments if arg.required} @@ -331,16 +325,14 @@ async def render( raise ValueError(f"Missing required arguments: {missing}") try: - # Prepare arguments with context + # Prepare arguments kwargs = arguments.copy() if arguments else {} - context_kwarg = find_kwarg_by_type(self.fn, kwarg_type=Context) - if context_kwarg and context_kwarg not in kwargs: - kwargs[context_kwarg] = get_context() - # Convert string arguments to expected types when needed + # Convert string arguments to expected types BEFORE validation kwargs = 
self._convert_string_arguments(kwargs) - # Call function and check if result is a coroutine + # self.fn is wrapped by without_injected_parameters which handles + # dependency resolution internally result = self.fn(**kwargs) if inspect.isawaitable(result): result = await result diff --git a/src/fastmcp/resources/resource.py b/src/fastmcp/resources/resource.py index f27070447a..d3a40bbc84 100644 --- a/src/fastmcp/resources/resource.py +++ b/src/fastmcp/resources/resource.py @@ -19,10 +19,9 @@ ) from typing_extensions import Self -from fastmcp.server.dependencies import get_context +from fastmcp.server.dependencies import get_context, without_injected_parameters from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.types import ( - find_kwarg_by_type, get_fn_name, ) @@ -48,6 +47,12 @@ class Resource(FastMCPComponent): Annotations | None, Field(description="Optional annotations about the resource's behavior"), ] = None + task: Annotated[ + bool, + Field( + description="Whether this resource supports background task execution (SEP-1686)" + ), + ] = False def enable(self) -> None: super().enable() @@ -78,6 +83,7 @@ def from_function( enabled: bool | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionResource: return FunctionResource.from_function( fn=fn, @@ -91,6 +97,7 @@ def from_function( enabled=enabled, annotations=annotations, meta=meta, + task=task, ) @field_validator("mime_type", mode="before") @@ -184,12 +191,17 @@ def from_function( enabled: bool | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionResource: """Create a FunctionResource from a function.""" if isinstance(uri, str): uri = AnyUrl(uri) + + # Wrap fn to handle dependency resolution internally + wrapped_fn = without_injected_parameters(fn) + return cls( - fn=fn, + fn=wrapped_fn, uri=uri, name=name or get_fn_name(fn), 
title=title, @@ -200,18 +212,14 @@ def from_function( enabled=enabled if enabled is not None else True, annotations=annotations, meta=meta, + task=task if task is not None else False, ) async def read(self) -> str | bytes: """Read the resource by calling the wrapped function.""" - from fastmcp.server.context import Context - - kwargs = {} - context_kwarg = find_kwarg_by_type(self.fn, kwarg_type=Context) - if context_kwarg is not None: - kwargs[context_kwarg] = get_context() - - result = self.fn(**kwargs) + # self.fn is wrapped by without_injected_parameters which handles + # dependency resolution internally + result = self.fn() if inspect.isawaitable(result): result = await result diff --git a/src/fastmcp/resources/template.py b/src/fastmcp/resources/template.py index cde1d273cf..2a87e8949f 100644 --- a/src/fastmcp/resources/template.py +++ b/src/fastmcp/resources/template.py @@ -5,7 +5,7 @@ import inspect import re from collections.abc import Callable -from typing import Any +from typing import Annotated, Any from urllib.parse import parse_qs, unquote from mcp.types import Annotations, Icon @@ -17,13 +17,10 @@ ) from fastmcp.resources.resource import Resource -from fastmcp.server.dependencies import get_context +from fastmcp.server.dependencies import get_context, without_injected_parameters from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.json_schema import compress_schema -from fastmcp.utilities.types import ( - find_kwarg_by_type, - get_cached_typeadapter, -) +from fastmcp.utilities.types import get_cached_typeadapter def extract_query_params(uri_template: str) -> set[str]: @@ -106,6 +103,12 @@ class ResourceTemplate(FastMCPComponent): annotations: Annotations | None = Field( default=None, description="Optional annotations about the resource's behavior" ) + task: Annotated[ + bool, + Field( + description="Whether this resource template supports background task execution (SEP-1686)" + ), + ] = False def __repr__(self) -> str: 
return f"{self.__class__.__name__}(uri_template={self.uri_template!r}, name={self.name!r}, description={self.description!r}, tags={self.tags})" @@ -139,6 +142,7 @@ def from_function( enabled: bool | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionResourceTemplate: return FunctionResourceTemplate.from_function( fn=fn, @@ -152,6 +156,7 @@ def from_function( enabled=enabled, annotations=annotations, meta=meta, + task=task, ) @field_validator("mime_type", mode="before") @@ -188,6 +193,7 @@ async def resource_read_fn() -> str | bytes: mime_type=self.mime_type, tags=self.tags, enabled=self.enabled, + task=self.task, ) def to_mcp_template( @@ -242,42 +248,33 @@ class FunctionResourceTemplate(ResourceTemplate): async def read(self, arguments: dict[str, Any]) -> str | bytes: """Read the resource content.""" - from fastmcp.server.context import Context - - # Add context to parameters if needed - kwargs = arguments.copy() - context_kwarg = find_kwarg_by_type(self.fn, kwarg_type=Context) - if context_kwarg and context_kwarg not in kwargs: - kwargs[context_kwarg] = get_context() - # Type coercion for query parameters (which arrive as strings) - # Get function signature for type hints + kwargs = arguments.copy() sig = inspect.signature(self.fn) for param_name, param_value in list(kwargs.items()): if param_name in sig.parameters and isinstance(param_value, str): param = sig.parameters[param_name] annotation = param.annotation - # Skip if no annotation or annotation is str if annotation is inspect.Parameter.empty or annotation is str: continue - # Handle common type coercions try: if annotation is int: kwargs[param_name] = int(param_value) elif annotation is float: kwargs[param_name] = float(param_value) elif annotation is bool: - # Handle boolean strings kwargs[param_name] = param_value.lower() in ("true", "1", "yes") except (ValueError, AttributeError): - # Let validate_call handle the error pass + 
# self.fn is wrapped by without_injected_parameters which handles + # dependency resolution internally, so we call it directly result = self.fn(**kwargs) if inspect.isawaitable(result): result = await result + return result @classmethod @@ -294,9 +291,9 @@ def from_function( enabled: bool | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionResourceTemplate: """Create a template from a function.""" - from fastmcp.server.context import Context func_name = name or getattr(fn, "__name__", None) or fn.__class__.__name__ if func_name == "": @@ -311,10 +308,6 @@ def from_function( "Functions with *args are not supported as resource templates" ) - # Auto-detect context parameter if not provided - - context_kwarg = find_kwarg_by_type(fn, kwarg_type=Context) - # Extract path and query parameters from URI template path_params = set(re.findall(r"{(\w+)(?:\*)?}", uri_template)) query_params = extract_query_params(uri_template) @@ -323,24 +316,23 @@ def from_function( if not all_uri_params: raise ValueError("URI template must contain at least one parameter") - func_params = set(sig.parameters.keys()) - if context_kwarg: - func_params.discard(context_kwarg) + # Use wrapper to get user-facing parameters (excludes injected params) + wrapper_fn = without_injected_parameters(fn) + user_sig = inspect.signature(wrapper_fn) + func_params = set(user_sig.parameters.keys()) # Get required and optional function parameters required_params = { p for p in func_params - if sig.parameters[p].default is inspect.Parameter.empty - and sig.parameters[p].kind != inspect.Parameter.VAR_KEYWORD - and p != context_kwarg + if user_sig.parameters[p].default is inspect.Parameter.empty + and user_sig.parameters[p].kind != inspect.Parameter.VAR_KEYWORD } optional_params = { p for p in func_params - if sig.parameters[p].default is not inspect.Parameter.empty - and sig.parameters[p].kind != inspect.Parameter.VAR_KEYWORD - and p != 
context_kwarg + if user_sig.parameters[p].default is not inspect.Parameter.empty + and user_sig.parameters[p].kind != inspect.Parameter.VAR_KEYWORD } # Validate RFC 6570 query parameters @@ -377,15 +369,13 @@ def from_function( if isinstance(fn, staticmethod): fn = fn.__func__ - type_adapter = get_cached_typeadapter(fn) + wrapper_fn = without_injected_parameters(fn) + type_adapter = get_cached_typeadapter(wrapper_fn) parameters = type_adapter.json_schema() + parameters = compress_schema(parameters, prune_titles=True) - # compress the schema - prune_params = [context_kwarg] if context_kwarg else None - parameters = compress_schema(parameters, prune_params=prune_params) - - # ensure the arguments are properly cast - fn = validate_call(fn) + # Use validate_call on wrapper for runtime type coercion + fn = validate_call(wrapper_fn) return cls( uri_template=uri_template, @@ -400,4 +390,5 @@ def from_function( enabled=enabled if enabled is not None else True, annotations=annotations, meta=meta, + task=task if task is not None else False, ) diff --git a/src/fastmcp/server/context.py b/src/fastmcp/server/context.py index ef1256c683..3cc7e32cd3 100644 --- a/src/fastmcp/server/context.py +++ b/src/fastmcp/server/context.py @@ -166,6 +166,12 @@ async def __aenter__(self) -> Context: # Always set this context and save the token token = _current_context.set(self) self._tokens.append(token) + + # Set current server for dependency injection (use weakref to avoid reference cycles) + from fastmcp.server.dependencies import _current_server + + self._server_token = _current_server.set(weakref.ref(self.fastmcp)) + return self async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: @@ -173,6 +179,14 @@ async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: # Flush any remaining notifications before exiting await self._flush_notifications() + # Reset server token + if hasattr(self, "_server_token"): + from fastmcp.server.dependencies import _current_server + + 
_current_server.reset(self._server_token) + delattr(self, "_server_token") + + # Reset context token if self._tokens: token = self._tokens.pop() _current_context.reset(token) @@ -272,7 +286,8 @@ async def read_resource(self, uri: str | AnyUrl) -> list[ReadResourceContents]: Returns: The resource content as either text or bytes """ - return await self.fastmcp._read_resource_mcp(uri) + # Context calls don't have task metadata, so always returns list + return await self.fastmcp._read_resource_mcp(uri) # type: ignore[return-value] async def log( self, diff --git a/src/fastmcp/server/dependencies.py b/src/fastmcp/server/dependencies.py index 68f22e6061..51abc1f9cf 100644 --- a/src/fastmcp/server/dependencies.py +++ b/src/fastmcp/server/dependencies.py @@ -1,8 +1,16 @@ from __future__ import annotations import contextlib -from typing import TYPE_CHECKING +import inspect +import weakref +from collections.abc import AsyncGenerator, Callable +from contextlib import AsyncExitStack, asynccontextmanager +from contextvars import ContextVar +from functools import lru_cache +from typing import TYPE_CHECKING, Any, cast, get_type_hints +from docket.dependencies import Dependency, _Depends, get_dependency_parameters +from docket.dependencies import Progress as DocketProgress from mcp.server.auth.middleware.auth_context import ( get_access_token as _sdk_get_access_token, ) @@ -15,20 +23,233 @@ from fastmcp.server.auth import AccessToken from fastmcp.server.http import _current_http_request +from fastmcp.utilities.types import is_class_member_of_type if TYPE_CHECKING: + from docket import Docket + from docket.worker import Worker + from fastmcp.server.context import Context + from fastmcp.server.server import FastMCP + +# ContextVars for tracking Docket infrastructure +_current_docket: ContextVar[Docket | None] = ContextVar("docket", default=None) # type: ignore[assignment] +_current_worker: ContextVar[Worker | None] = ContextVar("worker", default=None) # type: ignore[assignment] 
+_current_server: ContextVar[weakref.ref[FastMCP] | None] = ContextVar( # type: ignore[invalid-assignment] + "server", default=None +) __all__ = [ "AccessToken", + "CurrentContext", + "CurrentDocket", + "CurrentFastMCP", + "CurrentWorker", + "Progress", "get_access_token", "get_context", "get_http_headers", "get_http_request", + "get_server", + "resolve_dependencies", + "without_injected_parameters", ] -# --- Context --- +def _find_kwarg_by_type(fn: Callable, kwarg_type: type) -> str | None: + """Find the name of the kwarg that is of type kwarg_type. + + This is the legacy dependency injection approach, used specifically for + injecting the Context object when a function parameter is typed as Context. + + Includes union types that contain the kwarg_type, as well as Annotated types. + """ + + if inspect.ismethod(fn) and hasattr(fn, "__func__"): + fn = fn.__func__ + + try: + type_hints = get_type_hints(fn, include_extras=True) + except Exception: + type_hints = getattr(fn, "__annotations__", {}) + + sig = inspect.signature(fn) + for name, param in sig.parameters.items(): + annotation = type_hints.get(name, param.annotation) + if is_class_member_of_type(annotation, kwarg_type): + return name + return None + + +@lru_cache(maxsize=5000) +def without_injected_parameters(fn: Callable[..., Any]) -> Callable[..., Any]: + """Create a wrapper function without injected parameters. + + Returns a wrapper that excludes Context and Docket dependency parameters, + making it safe to use with Pydantic TypeAdapter for schema generation and + validation. The wrapper internally handles all dependency resolution and + Context injection when called. 
+ + Args: + fn: Original function with Context and/or dependencies + + Returns: + Async wrapper function without injected parameters + """ + from fastmcp.server.context import Context + + # Identify parameters to exclude + context_kwarg = _find_kwarg_by_type(fn, Context) + dependency_params = get_dependency_parameters(fn) + + exclude = set() + if context_kwarg: + exclude.add(context_kwarg) + if dependency_params: + exclude.update(dependency_params.keys()) + + if not exclude: + return fn + + # Build new signature with only user parameters + sig = inspect.signature(fn) + user_params = [ + param for name, param in sig.parameters.items() if name not in exclude + ] + new_sig = inspect.Signature(user_params) + + # Create async wrapper that handles dependency resolution + async def wrapper(**user_kwargs: Any) -> Any: + async with resolve_dependencies(fn, user_kwargs) as resolved_kwargs: + result = fn(**resolved_kwargs) + if inspect.isawaitable(result): + result = await result + return result + + # Set wrapper metadata (only parameter annotations, not return type) + wrapper.__signature__ = new_sig # type: ignore + wrapper.__annotations__ = { + k: v + for k, v in getattr(fn, "__annotations__", {}).items() + if k not in exclude and k != "return" + } + wrapper.__name__ = getattr(fn, "__name__", "wrapper") + wrapper.__doc__ = getattr(fn, "__doc__", None) + + return wrapper + + +@asynccontextmanager +async def _resolve_fastmcp_dependencies( + fn: Callable[..., Any], arguments: dict[str, Any] +) -> AsyncGenerator[dict[str, Any], None]: + """Resolve Docket dependencies for a FastMCP function. + + Sets up the minimal context needed for Docket's Depends() to work: + - A cache for resolved dependencies + - An AsyncExitStack for managing context manager lifetimes + + The Docket instance (for CurrentDocket dependency) is managed separately + by the server's lifespan and made available via ContextVar. + + Note: This does NOT set up Docket's Execution context. 
If user code needs + Docket-specific dependencies like TaskArgument(), TaskKey(), etc., those + will fail with clear errors about missing context. + + Args: + fn: The function to resolve dependencies for + arguments: The arguments passed to the function + + Yields: + Dictionary of resolved dependencies merged with provided arguments + """ + dependency_params = get_dependency_parameters(fn) + + if not dependency_params: + yield arguments + return + + # Initialize dependency cache and exit stack + cache_token = _Depends.cache.set({}) + try: + async with AsyncExitStack() as stack: + stack_token = _Depends.stack.set(stack) + try: + resolved: dict[str, Any] = {} + + for parameter, dependency in dependency_params.items(): + # If argument was explicitly provided, use that instead + if parameter in arguments: + resolved[parameter] = arguments[parameter] + continue + + # Resolve the dependency + try: + resolved[parameter] = await stack.enter_async_context( + dependency + ) + except Exception as error: + fn_name = getattr(fn, "__name__", repr(fn)) + raise RuntimeError( + f"Failed to resolve dependency '{parameter}' for {fn_name}" + ) from error + + # Merge resolved dependencies with provided arguments + final_arguments = {**arguments, **resolved} + + yield final_arguments + finally: + _Depends.stack.reset(stack_token) + finally: + _Depends.cache.reset(cache_token) + + +@asynccontextmanager +async def resolve_dependencies( + fn: Callable[..., Any], arguments: dict[str, Any] +) -> AsyncGenerator[dict[str, Any], None]: + """Resolve dependencies and inject Context for a FastMCP function. + + This function: + 1. Filters out any dependency parameter names from user arguments (security) + 2. Resolves Docket dependencies + 3. Injects Context if needed + 4. Merges everything together + + The filtering prevents external callers from overriding injected parameters by + providing values for dependency parameter names. This is a security feature. 
+ + Args: + fn: The function to resolve dependencies for + arguments: User arguments (may contain keys that match dependency names, + which will be filtered out) + + Yields: + Dictionary of filtered user args + resolved dependencies + Context + + Example: + ```python + async with resolve_dependencies(my_tool, {"name": "Alice"}) as kwargs: + result = my_tool(**kwargs) + if inspect.isawaitable(result): + result = await result + ``` + """ + from fastmcp.server.context import Context + + # Filter out dependency parameters from user arguments to prevent override + # This is a security measure - external callers should never be able to + # provide values for injected parameters + dependency_params = get_dependency_parameters(fn) + user_args = {k: v for k, v in arguments.items() if k not in dependency_params} + + async with _resolve_fastmcp_dependencies(fn, user_args) as resolved_kwargs: + # Inject Context if needed + context_kwarg = _find_kwarg_by_type(fn, kwarg_type=Context) + if context_kwarg and context_kwarg not in resolved_kwargs: + resolved_kwargs[context_kwarg] = get_context() + + yield resolved_kwargs def get_context() -> Context: @@ -40,7 +261,271 @@ def get_context() -> Context: return context -# --- HTTP Request --- +class _CurrentContext(Dependency): + """Internal dependency class for CurrentContext.""" + + async def __aenter__(self) -> Context: + return get_context() + + +def CurrentContext() -> Context: + """Get the current FastMCP Context instance. + + This dependency provides access to the active FastMCP Context for the + current MCP operation (tool/resource/prompt call). 
+ + Returns: + A dependency that resolves to the active Context instance + + Raises: + RuntimeError: If no active context found (during resolution) + + Example: + ```python + from fastmcp.dependencies import CurrentContext + + @mcp.tool() + async def log_progress(ctx: Context = CurrentContext()) -> str: + ctx.report_progress(50, 100, "Halfway done") + return "Working" + ``` + """ + return cast("Context", _CurrentContext()) + + +class _CurrentDocket(Dependency): + """Internal dependency class for CurrentDocket.""" + + async def __aenter__(self) -> Docket: + import fastmcp + + # Check if flag is enabled + if not fastmcp.settings.enable_docket: + raise RuntimeError( + "Docket support is not enabled. " + "Set FASTMCP_ENABLE_DOCKET=true to enable Docket support." + ) + + # Get Docket from ContextVar (set by _docket_lifespan) + docket = _current_docket.get() + if docket is None: + raise RuntimeError( + "No Docket instance found. This should not happen when " + "FASTMCP_ENABLE_DOCKET is enabled." + ) + + return docket + + +def CurrentDocket() -> Docket: + """Get the current Docket instance managed by FastMCP. + + This dependency provides access to the Docket instance that FastMCP + automatically creates when Docket support is enabled. + + Requires: + - FASTMCP_ENABLE_DOCKET=true + + Returns: + A dependency that resolves to the active Docket instance + + Raises: + RuntimeError: If flag not enabled (during resolution) + + Example: + ```python + from fastmcp.dependencies import CurrentDocket + + @mcp.tool() + async def schedule_task(docket: Docket = CurrentDocket()) -> str: + await docket.add(some_function)(arg1, arg2) + return "Scheduled" + ``` + """ + return cast("Docket", _CurrentDocket()) + + +class _CurrentWorker(Dependency): + """Internal dependency class for CurrentWorker.""" + + async def __aenter__(self) -> Worker: + import fastmcp + + if not fastmcp.settings.enable_docket: + raise RuntimeError( + "Docket support is not enabled. 
" + "Set FASTMCP_ENABLE_DOCKET=true to enable Docket support." + ) + + worker = _current_worker.get() + if worker is None: + raise RuntimeError( + "No Worker instance found. This should not happen when " + "FASTMCP_ENABLE_DOCKET is enabled." + ) + + return worker + + +def CurrentWorker() -> Worker: + """Get the current Docket Worker instance managed by FastMCP. + + This dependency provides access to the Worker instance that FastMCP + automatically creates when Docket support is enabled. + + Requires: + - FASTMCP_ENABLE_DOCKET=true + + Returns: + A dependency that resolves to the active Worker instance + + Raises: + RuntimeError: If flag not enabled (during resolution) + + Example: + ```python + from fastmcp.dependencies import CurrentWorker + + @mcp.tool() + async def check_worker_status(worker: Worker = CurrentWorker()) -> str: + return f"Worker: {worker.name}" + ``` + """ + return cast("Worker", _CurrentWorker()) + + +class InMemoryProgress(DocketProgress): + """In-memory progress tracker for immediate tool execution. + + Provides the same interface as Progress but stores state in memory + instead of Redis. Useful for testing and immediate execution where + progress doesn't need to be observable across processes. 
+ """ + + def __init__(self) -> None: + super().__init__() + self._current: int | None = None + self._total: int = 1 + self._message: str | None = None + + async def __aenter__(self) -> DocketProgress: + return self + + @property + def current(self) -> int | None: + return self._current + + @property + def total(self) -> int: + return self._total + + @property + def message(self) -> str | None: + return self._message + + async def set_total(self, total: int) -> None: + """Set the total/target value for progress tracking.""" + if total < 1: + raise ValueError("Total must be at least 1") + self._total = total + + async def increment(self, amount: int = 1) -> None: + """Atomically increment the current progress value.""" + if amount < 1: + raise ValueError("Amount must be at least 1") + if self._current is None: + self._current = amount + else: + self._current += amount + + async def set_message(self, message: str | None) -> None: + """Update the progress status message.""" + self._message = message + + +class Progress(DocketProgress): + """FastMCP Progress dependency that works in both server and worker contexts. + + Extends Docket's Progress to handle two execution modes: + - In Docket worker: Uses the execution's progress (standard Docket behavior) + - In FastMCP server: Uses in-memory progress (not observable remotely) + + This allows tools to use Progress() regardless of whether they're called + immediately or as background tasks. + """ + + async def __aenter__(self) -> DocketProgress: + # Try to get execution from Docket worker context + try: + return await super().__aenter__() + except LookupError: + # Not in worker context - return in-memory progress + docket = _current_docket.get() + if docket is None: + raise RuntimeError( + "Progress dependency requires Docket to be enabled. 
" + "Set FASTMCP_ENABLE_DOCKET=true" + ) from None + + # Return in-memory progress for immediate execution + return InMemoryProgress() + + +class _CurrentFastMCP(Dependency): + """Internal dependency class for CurrentFastMCP.""" + + async def __aenter__(self): + server_ref = _current_server.get() + if server_ref is None: + raise RuntimeError("No FastMCP server instance in context") + server = server_ref() + if server is None: + raise RuntimeError("FastMCP server instance is no longer available") + return server + + +def CurrentFastMCP(): + """Get the current FastMCP server instance. + + This dependency provides access to the active FastMCP server. + + Returns: + A dependency that resolves to the active FastMCP server + + Raises: + RuntimeError: If no server in context (during resolution) + + Example: + ```python + from fastmcp.dependencies import CurrentFastMCP + + @mcp.tool() + async def introspect(server: FastMCP = CurrentFastMCP()) -> str: + return f"Server: {server.name}" + ``` + """ + from fastmcp.server.server import FastMCP + + return cast(FastMCP, _CurrentFastMCP()) + + +def get_server(): + """Get the current FastMCP server instance directly. + + Returns: + The active FastMCP server + + Raises: + RuntimeError: If no server in context + """ + + server_ref = _current_server.get() + if server_ref is None: + raise RuntimeError("No FastMCP server instance in context") + server = server_ref() + if server is None: + raise RuntimeError("FastMCP server instance is no longer available") + return server def get_http_request() -> Request: @@ -105,9 +590,6 @@ def get_http_headers(include_all: bool = False) -> dict[str, str]: return {} -# --- Access Token --- - - def get_access_token() -> AccessToken | None: """ Get the FastMCP access token from the current context. 
diff --git a/src/fastmcp/server/http.py b/src/fastmcp/server/http.py index 8e89650ca9..6916803df7 100644 --- a/src/fastmcp/server/http.py +++ b/src/fastmcp/server/http.py @@ -19,6 +19,7 @@ from starlette.routing import BaseRoute, Mount, Route from starlette.types import Lifespan, Receive, Scope, Send +import fastmcp from fastmcp.server.auth import AuthProvider from fastmcp.server.auth.middleware import RequireAuthMiddleware from fastmcp.utilities.logging import get_logger @@ -158,10 +159,27 @@ def create_sse_app( # Create handler for SSE connections async def handle_sse(scope: Scope, receive: Receive, send: Send) -> Response: async with sse.connect_sse(scope, receive, send) as streams: + # Build experimental capabilities + experimental_capabilities = {} + if fastmcp.settings.enable_tasks: + # Declare SEP-1686 task support per final spec (lines 49-63) + # Nested structure: {list: {}, cancel: {}, requests: {tools: {call: {}}}} + experimental_capabilities["tasks"] = { + "list": {}, + "cancel": {}, + "requests": { + "tools": {"call": {}}, + "prompts": {"get": {}}, + "resources": {"read": {}}, + }, + } + await server._mcp_server.run( streams[0], streams[1], - server._mcp_server.create_initialization_options(), + server._mcp_server.create_initialization_options( + experimental_capabilities=experimental_capabilities + ), ) return Response() diff --git a/src/fastmcp/server/low_level.py b/src/fastmcp/server/low_level.py index dd8c61ec82..e2f2603aef 100644 --- a/src/fastmcp/server/low_level.py +++ b/src/fastmcp/server/low_level.py @@ -35,6 +35,8 @@ class MiddlewareServerSession(ServerSession): def __init__(self, fastmcp: FastMCP, *args, **kwargs): super().__init__(*args, **kwargs) self._fastmcp_ref: weakref.ref[FastMCP] = weakref.ref(fastmcp) + # Task group for subscription tasks (set during session run) + self._subscription_task_group: anyio.TaskGroup | None = None # type: ignore[valid-type] @property def fastmcp(self) -> FastMCP: @@ -49,11 +51,10 @@ async def 
_received_request( responder: RequestResponder[mcp.types.ClientRequest, mcp.types.ServerResult], ): """ - Override the _received_request method to route initialization requests + Override the _received_request method to route special requests through FastMCP middleware. - These are not handled by routes that FastMCP typically overrides and - require special handling. + Handles initialization requests and SEP-1686 task methods. """ import fastmcp.server.context from fastmcp.server.middleware.middleware import MiddlewareContext @@ -106,8 +107,9 @@ async def call_original_handler( return await self.fastmcp._apply_middleware( mw_context, call_original_handler ) - else: - return await super()._received_request(responder) + + # Fall through to default handling (task methods now handled via registered handlers) + return await super()._received_request(responder) class LowLevelServer(_Server[LifespanResultT, RequestT]): @@ -170,6 +172,9 @@ async def run( ) async with anyio.create_task_group() as tg: + # Store task group on session for subscription tasks (SEP-1686) + session._subscription_task_group = tg + async for message in session.incoming_messages: tg.start_soon( self._handle_message, diff --git a/src/fastmcp/server/server.py b/src/fastmcp/server/server.py index d7cd94cebb..0d07204041 100644 --- a/src/fastmcp/server/server.py +++ b/src/fastmcp/server/server.py @@ -2,10 +2,12 @@ from __future__ import annotations +import asyncio import inspect import re import secrets import warnings +import weakref from collections.abc import ( AsyncIterator, Awaitable, @@ -28,6 +30,7 @@ import httpx import mcp.types import uvicorn +from docket import Docket, Worker from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.lowlevel.server import LifespanResultT, NotificationOptions from mcp.server.stdio import stdio_server @@ -67,6 +70,11 @@ ) from fastmcp.server.low_level import LowLevelServer from fastmcp.server.middleware import Middleware, MiddlewareContext 
+from fastmcp.server.tasks.handlers import ( + handle_prompt_as_task, + handle_resource_as_task, + handle_tool_as_task, +) from fastmcp.settings import Settings from fastmcp.tools.tool import FunctionTool, Tool, ToolResult from fastmcp.tools.tool_manager import ToolManager @@ -160,6 +168,7 @@ def __init__( on_duplicate_resources: DuplicateBehavior | None = None, on_duplicate_prompts: DuplicateBehavior | None = None, strict_input_validation: bool | None = None, + tasks: bool | None = None, # --- # --- # --- The following arguments are DEPRECATED --- @@ -177,6 +186,14 @@ def __init__( sampling_handler: ServerSamplingHandler[LifespanResultT] | None = None, sampling_handler_behavior: Literal["always", "fallback"] | None = None, ): + # Resolve server default for background task support + self._support_tasks_by_default: bool = ( + tasks if tasks is not None else fastmcp.settings.enable_tasks + ) + + # Docket instance (set during lifespan for cross-task access) + self._docket = None + self._additional_http_routes: list[BaseRoute] = [] self._mounted_servers: list[MountedServer] = [] self._tool_manager: ToolManager = ToolManager( @@ -257,7 +274,6 @@ def __init__( else fastmcp.settings.include_fastmcp_meta ) - # handle deprecated settings self._handle_deprecated_settings( log_level=log_level, debug=debug, @@ -350,13 +366,135 @@ def icons(self) -> list[mcp.types.Icon]: else: return list(self._mcp_server.icons) + @property + def docket(self) -> Docket | None: + """Get the Docket instance if Docket support is enabled. + + Returns None if Docket is not enabled or server hasn't been started yet. + """ + return self._docket + + @asynccontextmanager + async def _docket_lifespan( + self, user_lifespan_result: LifespanResultT + ) -> AsyncIterator[LifespanResultT]: + """Manage Docket instance and Worker when experimental support is enabled. 
+ + Args: + user_lifespan_result: The result from the user's lifespan function + + Yields: + User's lifespan result (Docket is managed via ContextVar, not lifespan result) + """ + from fastmcp import settings + from fastmcp.server.dependencies import _current_docket, _current_worker + + # Validate configuration + if settings.enable_tasks and not settings.enable_docket: + raise RuntimeError( + "Server requires enable_docket=True when enable_tasks=True. " + "Task protocol support needs Docket for background execution." + ) + + if not settings.enable_docket: + # Docket support not enabled, pass through user lifespan result + yield user_lifespan_result + return + + # Set FastMCP server in ContextVar so CurrentFastMCP can access it (use weakref to avoid reference cycles) + from fastmcp.server.dependencies import _current_server + + server_token = _current_server.set(weakref.ref(self)) + + try: + # Create Docket instance using configured name and URL + async with Docket( + name=settings.docket.name, + url=settings.docket.url, + ) as docket: + # Store on server instance for cross-task access (FastMCPTransport) + self._docket = docket + + # Register task-enabled tools/prompts/resources with Docket + tools = await self.get_tools() + for tool in tools.values(): + supports_task = ( + tool.task + if tool.task is not None + else self._support_tasks_by_default + ) + if supports_task: + docket.register(tool.fn) + + prompts = await self.get_prompts() + for prompt in prompts.values(): + supports_task = ( + prompt.task + if prompt.task is not None + else self._support_tasks_by_default + ) + if supports_task: + docket.register(prompt.fn) + + resources = await self.get_resources() + for resource in resources.values(): + supports_task = ( + resource.task + if resource.task is not None + else self._support_tasks_by_default + ) + if supports_task: + docket.register(resource.fn) + + # Set Docket in ContextVar so CurrentDocket can access it + docket_token = _current_docket.set(docket) + 
try: + # Build worker kwargs from settings + worker_kwargs: dict[str, Any] = { + "concurrency": settings.docket.concurrency, + "redelivery_timeout": settings.docket.redelivery_timeout, + "reconnection_delay": settings.docket.reconnection_delay, + } + if settings.docket.worker_name: + worker_kwargs["name"] = settings.docket.worker_name + + # Create and start Worker, then task group for run_forever() + async with ( + Worker(docket, **worker_kwargs) as worker, # type: ignore[arg-type] + anyio.create_task_group() as tg, + ): + # Set Worker in ContextVar so CurrentWorker can access it + worker_token = _current_worker.set(worker) + try: + # Start worker as background task + tg.start_soon(worker.run_forever) + + try: + yield user_lifespan_result + finally: + # Cancel task group when exiting (cancels worker) + tg.cancel_scope.cancel() + finally: + _current_worker.reset(worker_token) + finally: + # Reset ContextVar + _current_docket.reset(docket_token) + # Clear instance attribute + self._docket = None + finally: + # Reset server ContextVar + _current_server.reset(server_token) + @asynccontextmanager async def _lifespan_manager(self) -> AsyncIterator[None]: if self._lifespan_result_set: yield return - async with self._lifespan(self) as lifespan_result: + async with ( + self._lifespan(self) as user_lifespan_result, + self._docket_lifespan(user_lifespan_result) as lifespan_result, + ): self._lifespan_result = lifespan_result self._lifespan_result_set = True @@ -431,8 +569,180 @@ def _setup_handlers(self) -> None: self._mcp_server.call_tool(validate_input=self.strict_input_validation)( self._call_tool_mcp ) - self._mcp_server.read_resource()(self._read_resource_mcp) - self._mcp_server.get_prompt()(self._get_prompt_mcp) + # Register custom read_resource handler (SDK decorator doesn't support CreateTaskResult) + self._setup_read_resource_handler() + # Register custom get_prompt handler (SDK decorator doesn't support CreateTaskResult) + self._setup_get_prompt_handler() + # 
Register custom SEP-1686 task protocol handlers + self._setup_task_protocol_handlers() + + def _setup_read_resource_handler(self) -> None: + """ + Set up custom read_resource handler that supports task-augmented responses. + + The SDK's read_resource decorator doesn't support CreateTaskResult returns, + so we register a custom handler that checks request_context.experimental.is_task. + """ + + async def handler(req: mcp.types.ReadResourceRequest) -> mcp.types.ServerResult: + uri = req.params.uri + + # Check for task metadata via SDK's request context + task_meta = None + try: + ctx = self._mcp_server.request_context + if ctx.experimental.is_task: + task_meta = ctx.experimental.task_metadata + except (AttributeError, LookupError): + pass + + # Check for task metadata and route appropriately + if task_meta and fastmcp.settings.enable_tasks: + async with fastmcp.server.context.Context(fastmcp=self): + try: + resource = await self._resource_manager.get_resource(uri) + if resource and resource.task: + # Convert TaskMetadata to dict for handler + task_meta_dict = task_meta.model_dump(exclude_none=True) + return await handle_resource_as_task( + self, str(uri), resource, task_meta_dict + ) + except NotFoundError: + pass + + # Synchronous execution + result = await self._read_resource_mcp(uri) + + # Convert to proper ServerResult + if isinstance(result, mcp.types.ServerResult): + return result + + mcp_contents = [] + for item in result: + if isinstance(item.content, str): + mcp_contents.append( + mcp.types.TextResourceContents( + uri=uri, + text=item.content, + mimeType=item.mime_type or "text/plain", + ) + ) + elif isinstance(item.content, bytes): + import base64 + + mcp_contents.append( + mcp.types.BlobResourceContents( + uri=uri, + blob=base64.b64encode(item.content).decode(), + mimeType=item.mime_type or "application/octet-stream", + ) + ) + + return mcp.types.ServerResult( + mcp.types.ReadResourceResult(contents=mcp_contents) + ) + + 
self._mcp_server.request_handlers[mcp.types.ReadResourceRequest] = handler + + def _setup_get_prompt_handler(self) -> None: + """ + Set up custom get_prompt handler that supports task-augmented responses. + + The SDK's get_prompt decorator doesn't support CreateTaskResult returns, + so we register a custom handler that checks request_context.experimental.is_task. + """ + + async def handler(req: mcp.types.GetPromptRequest) -> mcp.types.ServerResult: + name = req.params.name + arguments = req.params.arguments + + # Check for task metadata via SDK's request context + task_meta = None + try: + ctx = self._mcp_server.request_context + if ctx.experimental.is_task: + task_meta = ctx.experimental.task_metadata + except (AttributeError, LookupError): + pass + + # Check for task metadata and route appropriately + if task_meta and fastmcp.settings.enable_tasks: + async with fastmcp.server.context.Context(fastmcp=self): + prompts = await self.get_prompts() + prompt = prompts.get(name) + if prompt and prompt.task: + # Convert TaskMetadata to dict for handler + task_meta_dict = task_meta.model_dump(exclude_none=True) + result = await handle_prompt_as_task( + self, name, arguments, task_meta_dict + ) + return mcp.types.ServerResult(result) + else: + logger.debug( + f"[{self.name}] Prompt {name} does not support background execution, " + "ignoring task metadata and executing synchronously" + ) + + # Synchronous execution + result = await self._get_prompt_mcp(name, arguments) + return mcp.types.ServerResult(result) + + self._mcp_server.request_handlers[mcp.types.GetPromptRequest] = handler + + def _setup_task_protocol_handlers(self) -> None: + """Register SEP-1686 task protocol handlers with SDK.""" + if not fastmcp.settings.enable_tasks: + return + + from mcp.types import ( + CancelTaskRequest, + GetTaskPayloadRequest, + GetTaskRequest, + ListTasksRequest, + ServerResult, + ) + + from fastmcp.server.tasks.protocol import ( + tasks_cancel_handler, + tasks_get_handler, + 
tasks_list_handler, + tasks_result_handler, + ) + + # Manually register handlers (SDK decorators fail with locally-defined functions) + # SDK expects handlers that receive Request objects and return ServerResult + + async def handle_get_task(req: GetTaskRequest) -> ServerResult: + params = req.params.model_dump(by_alias=True, exclude_none=True) + result = await tasks_get_handler(self, params) + return ServerResult(result) + + async def handle_get_task_result(req: GetTaskPayloadRequest) -> ServerResult: + params = req.params.model_dump(by_alias=True, exclude_none=True) + result = await tasks_result_handler(self, params) + return ServerResult(result) + + async def handle_list_tasks(req: ListTasksRequest) -> ServerResult: + params = ( + req.params.model_dump(by_alias=True, exclude_none=True) + if req.params + else {} + ) + result = await tasks_list_handler(self, params) + return ServerResult(result) + + async def handle_cancel_task(req: CancelTaskRequest) -> ServerResult: + params = req.params.model_dump(by_alias=True, exclude_none=True) + result = await tasks_cancel_handler(self, params) + return ServerResult(result) + + # Register directly with SDK (same as what decorators do internally) + self._mcp_server.request_handlers[GetTaskRequest] = handle_get_task + self._mcp_server.request_handlers[GetTaskPayloadRequest] = ( + handle_get_task_result + ) + self._mcp_server.request_handlers[ListTasksRequest] = handle_list_tasks + self._mcp_server.request_handlers[CancelTaskRequest] = handle_cancel_task async def _apply_middleware( self, @@ -1009,7 +1319,7 @@ async def _call_tool_mcp( """ Handle MCP 'callTool' requests. - Delegates to _call_tool, which should be overridden by FastMCP subclasses. + Detects SEP-1686 task metadata and routes to background execution if supported. 
Args: key: The name of the tool to call @@ -1024,6 +1334,36 @@ async def _call_tool_mcp( async with fastmcp.server.context.Context(fastmcp=self): try: + # Check for SEP-1686 task metadata via request context + task_meta = None + try: + # Access task metadata from SDK's request context + ctx = self._mcp_server.request_context + if ctx.experimental.is_task: + task_meta = ctx.experimental.task_metadata + except (AttributeError, LookupError): + # No request context available - proceed without task metadata + pass + + if task_meta and fastmcp.settings.enable_tasks: + # Task metadata present - check if tool supports background execution + tool = self._tool_manager._tools.get(key) + if tool and tool.task: + # Route to background execution + # Convert TaskMetadata to dict for handler + task_meta_dict = task_meta.model_dump(exclude_none=True) + return await handle_tool_as_task( + self, key, arguments, task_meta_dict + ) + else: + # Graceful degradation per SEP-1686 spec + logger.debug( + f"[{self.name}] Tool {key} does not support background execution, " + "ignoring task metadata and executing synchronously" + ) + # Fall through to synchronous execution + + # Synchronous execution (normal path) result = await self._call_tool_middleware(key, arguments) return result.to_mcp_result() except DisabledError as e: @@ -1103,6 +1443,7 @@ async def _read_resource_mcp(self, uri: AnyUrl | str) -> list[ReadResourceConten async with fastmcp.server.context.Context(fastmcp=self): try: + # Task routing handled by custom handler return list[ReadResourceContents]( await self._read_resource_middleware(uri) ) @@ -1197,6 +1538,7 @@ async def _get_prompt_mcp( async with fastmcp.server.context.Context(fastmcp=self): try: + # Task routing handled by custom handler return await self._get_prompt_middleware(name, arguments) except DisabledError as e: # convert to NotFoundError to avoid leaking prompt presence @@ -1331,6 +1673,7 @@ def tool( exclude_args: list[str] | None = None, meta: dict[str, Any] | 
None = None, enabled: bool | None = None, + task: bool | None = None, ) -> FunctionTool: ... @overload @@ -1348,6 +1691,7 @@ def tool( exclude_args: list[str] | None = None, meta: dict[str, Any] | None = None, enabled: bool | None = None, + task: bool | None = None, ) -> Callable[[AnyFunction], FunctionTool]: ... def tool( @@ -1364,6 +1708,7 @@ def tool( exclude_args: list[str] | None = None, meta: dict[str, Any] | None = None, enabled: bool | None = None, + task: bool | None = None, ) -> Callable[[AnyFunction], FunctionTool] | FunctionTool: """Decorator to register a tool. @@ -1437,6 +1782,21 @@ def my_tool(x: int) -> str: fn = name_or_fn tool_name = name # Use keyword name if provided, otherwise None + # Resolve task parameter to concrete boolean + supports_task: bool = ( + task if task is not None else self._support_tasks_by_default + ) + + # Disable task support for sync functions (Docket requires async) + if supports_task and not asyncio.iscoroutinefunction(fn): + if task is True: + # User explicitly requested task=True for sync function + logger.warning( + f"Tool '{tool_name or fn.__name__}' has task=True but is synchronous. " + "Background task support requires async functions. Disabling task support." 
+ ) + supports_task = False + # Register the tool immediately and return the tool object # Note: Deprecation warning for exclude_args is handled in Tool.from_function tool = Tool.from_function( @@ -1452,6 +1812,7 @@ def my_tool(x: int) -> str: meta=meta, serializer=self._tool_serializer, enabled=enabled, + task=supports_task, ) self.add_tool(tool) return tool @@ -1485,6 +1846,7 @@ def my_tool(x: int) -> str: exclude_args=exclude_args, meta=meta, enabled=enabled, + task=task, ) def add_resource(self, resource: Resource) -> Resource: @@ -1544,6 +1906,7 @@ def resource( enabled: bool | None = None, annotations: Annotations | dict[str, Any] | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> Callable[[AnyFunction], Resource | ResourceTemplate]: """Decorator to register a function as a resource. @@ -1608,8 +1971,6 @@ async def get_weather(city: str) -> str: ) def decorator(fn: AnyFunction) -> Resource | ResourceTemplate: - from fastmcp.server.context import Context - if isinstance(fn, classmethod): # type: ignore[reportUnnecessaryIsInstance] raise ValueError( inspect.cleandoc( @@ -1622,14 +1983,28 @@ def decorator(fn: AnyFunction) -> Resource | ResourceTemplate: ) ) + # Resolve task parameter to concrete boolean + supports_task: bool = ( + task if task is not None else self._support_tasks_by_default + ) + + # Disable task support for sync functions (Docket requires async) + if supports_task and not asyncio.iscoroutinefunction(fn): + if task is True: + # User explicitly requested task=True for sync function + logger.warning( + f"Resource '{uri}' has task=True but is synchronous. " + "Background task support requires async functions. Disabling task support." 
+ ) + supports_task = False + # Check if this should be a template has_uri_params = "{" in uri and "}" in uri - # check if the function has any parameters (other than injected context) - has_func_params = any( - p - for p in inspect.signature(fn).parameters.values() - if p.annotation is not Context - ) + # Use wrapper to check for user-facing parameters + from fastmcp.server.dependencies import without_injected_parameters + + wrapper_fn = without_injected_parameters(fn) + has_func_params = bool(inspect.signature(wrapper_fn).parameters) if has_uri_params or has_func_params: template = ResourceTemplate.from_function( @@ -1644,6 +2019,7 @@ def decorator(fn: AnyFunction) -> Resource | ResourceTemplate: enabled=enabled, annotations=annotations, meta=meta, + task=supports_task, ) self.add_template(template) return template @@ -1660,6 +2036,7 @@ def decorator(fn: AnyFunction) -> Resource | ResourceTemplate: enabled=enabled, annotations=annotations, meta=meta, + task=supports_task, ) self.add_resource(resource) return resource @@ -1705,6 +2082,7 @@ def prompt( tags: set[str] | None = None, enabled: bool | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> FunctionPrompt: ... @overload @@ -1719,6 +2097,7 @@ def prompt( tags: set[str] | None = None, enabled: bool | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> Callable[[AnyFunction], FunctionPrompt]: ... def prompt( @@ -1732,6 +2111,7 @@ def prompt( tags: set[str] | None = None, enabled: bool | None = None, meta: dict[str, Any] | None = None, + task: bool | None = None, ) -> Callable[[AnyFunction], FunctionPrompt] | FunctionPrompt: """Decorator to register a prompt. 
@@ -1822,6 +2202,21 @@ def another_prompt(data: str) -> list[Message]: fn = name_or_fn prompt_name = name # Use keyword name if provided, otherwise None + # Resolve task parameter to concrete boolean + supports_task: bool = ( + task if task is not None else self._support_tasks_by_default + ) + + # Disable task support for sync functions (Docket requires async) + if supports_task and not asyncio.iscoroutinefunction(fn): + if task is True: + # User explicitly requested task=True for sync function + logger.warning( + f"Prompt '{prompt_name or fn.__name__}' has task=True but is synchronous. " + "Background task support requires async functions. Disabling task support." + ) + supports_task = False + # Register the prompt immediately prompt = Prompt.from_function( fn=fn, @@ -1832,6 +2227,7 @@ def another_prompt(data: str) -> list[Message]: tags=tags, enabled=enabled, meta=meta, + task=supports_task, ) self.add_prompt(prompt) @@ -1863,6 +2259,7 @@ def another_prompt(data: str) -> list[Message]: tags=tags, enabled=enabled, meta=meta, + task=task, ) async def run_stdio_async( @@ -1887,11 +2284,30 @@ async def run_stdio_async( logger.info( f"Starting MCP server {self.name!r} with transport 'stdio'" ) + + # Build experimental capabilities + experimental_capabilities = {} + if fastmcp.settings.enable_tasks: + # Declare SEP-1686 task support per final spec (lines 49-63) + # Nested structure: {list: {}, cancel: {}, requests: {tools: {call: {}}}} + experimental_capabilities["tasks"] = { + "list": {}, + "cancel": {}, + "requests": { + "tools": {"call": {}}, + "prompts": {"get": {}}, + "resources": {"read": {}}, + }, + } + await self._mcp_server.run( read_stream, write_stream, self._mcp_server.create_initialization_options( - NotificationOptions(tools_changed=True) + notification_options=NotificationOptions( + tools_changed=True + ), + experimental_capabilities=experimental_capabilities, ), ) diff --git a/src/fastmcp/server/tasks/__init__.py b/src/fastmcp/server/tasks/__init__.py 
new file mode 100644 index 0000000000..513644806e --- /dev/null +++ b/src/fastmcp/server/tasks/__init__.py @@ -0,0 +1,42 @@ +"""MCP SEP-1686 background tasks support. + +This module implements protocol-level background task execution for MCP servers. +""" + +from fastmcp.server.tasks.converters import ( + convert_prompt_result, + convert_resource_result, + convert_tool_result, +) +from fastmcp.server.tasks.handlers import ( + handle_prompt_as_task, + handle_resource_as_task, + handle_tool_as_task, +) +from fastmcp.server.tasks.keys import ( + build_task_key, + get_client_task_id_from_key, + parse_task_key, +) +from fastmcp.server.tasks.protocol import ( + tasks_cancel_handler, + tasks_get_handler, + tasks_list_handler, + tasks_result_handler, +) + +__all__ = [ + "build_task_key", + "convert_prompt_result", + "convert_resource_result", + "convert_tool_result", + "get_client_task_id_from_key", + "handle_prompt_as_task", + "handle_resource_as_task", + "handle_tool_as_task", + "parse_task_key", + "tasks_cancel_handler", + "tasks_get_handler", + "tasks_list_handler", + "tasks_result_handler", +] diff --git a/src/fastmcp/server/tasks/converters.py b/src/fastmcp/server/tasks/converters.py new file mode 100644 index 0000000000..3cd4ffb904 --- /dev/null +++ b/src/fastmcp/server/tasks/converters.py @@ -0,0 +1,203 @@ +"""SEP-1686 task result converters. + +Converts raw task return values to MCP result types. +""" + +from __future__ import annotations + +import base64 +import json +from typing import TYPE_CHECKING, Any + +import mcp.types +import pydantic_core + +from fastmcp.tools.tool import ToolResult, _convert_to_content + +if TYPE_CHECKING: + from fastmcp.server.server import FastMCP + + +async def convert_tool_result( + server: FastMCP, raw_value: Any, tool_name: str, client_task_id: str +) -> mcp.types.CallToolResult: + """Convert raw tool return value to MCP CallToolResult. 
+ + Replicates the serialization logic from tool.run() to properly handle + output_schema, structured content, etc. + + Args: + server: FastMCP server instance + raw_value: The raw return value from user's tool function + tool_name: Name of the tool (to get output_schema and serializer) + client_task_id: Client task ID for related-task metadata + + Returns: + CallToolResult with properly formatted content and structured content + """ + # Get the tool to access its configuration + tool = await server.get_tool(tool_name) + + # Build related-task metadata + related_task_meta = { + "modelcontextprotocol.io/related-task": { + "taskId": client_task_id, + } + } + + # If raw value is already ToolResult, use it directly + if isinstance(raw_value, ToolResult): + mcp_result = raw_value.to_mcp_result() + if isinstance(mcp_result, mcp.types.CallToolResult): + # Add metadata + mcp_result._meta = related_task_meta + return mcp_result + elif isinstance(mcp_result, tuple): + content, structured_content = mcp_result + return mcp.types.CallToolResult( + content=content, + structuredContent=structured_content, + _meta=related_task_meta, + ) + else: + return mcp.types.CallToolResult(content=mcp_result, _meta=related_task_meta) + + # Convert raw value to content blocks + unstructured_result = _convert_to_content(raw_value, serializer=tool.serializer) + + # Handle structured content creation (same logic as tool.run()) + structured_content = None + + if tool.output_schema is None: + # Try to serialize as dict for structured content + try: + sc = pydantic_core.to_jsonable_python(raw_value) + if isinstance(sc, dict): + structured_content = sc + except pydantic_core.PydanticSerializationError: + pass + else: + # Has output_schema - convert to JSON-able types + jsonable_value = pydantic_core.to_jsonable_python(raw_value) + wrap_result = tool.output_schema.get("x-fastmcp-wrap-result") + structured_content = ( + {"result": jsonable_value} if wrap_result else jsonable_value + ) + + return 
mcp.types.CallToolResult( + content=unstructured_result, + structuredContent=structured_content, + _meta=related_task_meta, + ) + + +async def convert_prompt_result( + server: FastMCP, raw_value: Any, prompt_name: str, client_task_id: str +) -> mcp.types.GetPromptResult: + """Convert raw prompt return value to MCP GetPromptResult. + + The user function returns raw values (strings, dicts, lists) that need + to be converted to PromptMessage objects. + + Args: + server: FastMCP server instance + raw_value: The raw return value from user's prompt function + prompt_name: Name of the prompt + client_task_id: Client task ID for related-task metadata + + Returns: + GetPromptResult with properly formatted messages + """ + from fastmcp.prompts.prompt import PromptMessage + + # Get the prompt for metadata + prompt = await server.get_prompt(prompt_name) + + # Normalize to list + if not isinstance(raw_value, list | tuple): + raw_value = [raw_value] + + # Convert to PromptMessages + messages: list[mcp.types.PromptMessage] = [] + for msg in raw_value: + if isinstance(msg, PromptMessage): + messages.append(msg.to_mcp()) + elif isinstance(msg, str): + messages.append( + mcp.types.PromptMessage( + role="user", + content=mcp.types.TextContent(type="text", text=msg), + ) + ) + elif isinstance(msg, dict): + messages.append(mcp.types.PromptMessage.model_validate(msg)) + else: + raise ValueError(f"Invalid message type: {type(msg)}") + + return mcp.types.GetPromptResult( + description=prompt.description or "", + messages=messages, + _meta={ + "modelcontextprotocol.io/related-task": { + "taskId": client_task_id, + } + }, + ) + + +async def convert_resource_result( + server: FastMCP, raw_value: Any, uri: str, client_task_id: str +) -> dict[str, Any]: + """Convert raw resource return value to MCP resource contents dict. 
+ + Args: + server: FastMCP server instance + raw_value: The raw return value from user's resource function (str or bytes) + uri: Resource URI (for the contents response) + client_task_id: Client task ID for related-task metadata + + Returns: + Dict with 'contents' key containing list of resource contents + """ + # Build related-task metadata + related_task_meta = { + "modelcontextprotocol.io/related-task": { + "taskId": client_task_id, + } + } + + # Resources return str or bytes directly + if isinstance(raw_value, str): + return { + "contents": [ + { + "uri": uri, + "text": raw_value, + "mimeType": "text/plain", + } + ], + "_meta": related_task_meta, + } + elif isinstance(raw_value, bytes): + return { + "contents": [ + { + "uri": uri, + "blob": base64.b64encode(raw_value).decode(), + "mimeType": "application/octet-stream", + } + ], + "_meta": related_task_meta, + } + else: + # Fallback: convert to JSON string + return { + "contents": [ + { + "uri": uri, + "text": json.dumps(raw_value), + "mimeType": "application/json", + } + ], + "_meta": related_task_meta, + } diff --git a/src/fastmcp/server/tasks/handlers.py b/src/fastmcp/server/tasks/handlers.py new file mode 100644 index 0000000000..14edca901c --- /dev/null +++ b/src/fastmcp/server/tasks/handlers.py @@ -0,0 +1,343 @@ +"""SEP-1686 task execution handlers. + +Handles queuing tool/prompt/resource executions to Docket as background tasks. 
+""" + +from __future__ import annotations + +import uuid +from contextlib import suppress +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any + +import mcp.types +from mcp.shared.exceptions import McpError +from mcp.types import INTERNAL_ERROR, ErrorData + +from fastmcp.server.dependencies import _current_docket, get_context +from fastmcp.server.tasks.keys import build_task_key + +if TYPE_CHECKING: + from fastmcp.server.server import FastMCP + +# Redis mapping TTL buffer: Add 15 minutes to Docket's execution_ttl +TASK_MAPPING_TTL_BUFFER_SECONDS = 15 * 60 + + +async def handle_tool_as_task( + server: FastMCP, + tool_name: str, + arguments: dict[str, Any], + task_meta: dict[str, Any], +) -> mcp.types.CallToolResult: + """Handle tool execution as background task (SEP-1686). + + Queues the user's actual function to Docket (preserving signature for DI), + stores raw return values, converts to MCP types on retrieval. + + Args: + server: FastMCP server instance + tool_name: Name of the tool to execute + arguments: Tool arguments + task_meta: Task metadata from request (contains ttl) + + Returns: + CallToolResult: Task stub with task metadata in _meta + """ + # Generate server-side task ID per SEP-1686 final spec (line 375-377) + # Server MUST generate task IDs, clients no longer provide them + server_task_id = str(uuid.uuid4()) + + # Record creation timestamp per SEP-1686 final spec (line 430) + # Format as ISO 8601 / RFC 3339 timestamp + created_at = datetime.now(timezone.utc).isoformat() + + # Get session ID and Docket + ctx = get_context() + session_id = ctx.session_id + + docket = _current_docket.get() + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket. 
Set FASTMCP_ENABLE_DOCKET=true", + ) + ) + + # Build full task key with embedded metadata + task_key = build_task_key(session_id, server_task_id, "tool", tool_name) + + # Get the tool to access user's function + tool = await server.get_tool(tool_name) + + # Store task key mapping and creation timestamp in Redis for protocol handlers + redis_key = f"fastmcp:task:{session_id}:{server_task_id}" + created_at_key = f"fastmcp:task:{session_id}:{server_task_id}:created_at" + ttl_seconds = int( + docket.execution_ttl.total_seconds() + TASK_MAPPING_TTL_BUFFER_SECONDS + ) + async with docket.redis() as redis: + await redis.set(redis_key, task_key, ex=ttl_seconds) + await redis.set(created_at_key, created_at, ex=ttl_seconds) + + # Send notifications/tasks/created per SEP-1686 (mandatory) + # Send BEFORE queuing to avoid race where task completes before notification + notification = mcp.types.JSONRPCNotification( + jsonrpc="2.0", + method="notifications/tasks/created", + params={}, # Empty params per spec + _meta={ # taskId in _meta per spec + "modelcontextprotocol.io/related-task": { + "taskId": server_task_id, + } + }, + ) + + ctx = get_context() + with suppress(Exception): + # Don't let notification failures break task creation + await ctx.session.send_notification(notification) # type: ignore[arg-type] + + # Queue function to Docket (result storage via execution_ttl) + await docket.add( + tool.fn, # type: ignore[attr-defined] + key=task_key, + )(**arguments) + + # Spawn subscription task to send status notifications (SEP-1686 optional feature) + from fastmcp.server.tasks.subscriptions import subscribe_to_task_updates + + # Start subscription in session's task group (persists for connection lifetime) + if hasattr(ctx.session, "_subscription_task_group"): + tg = ctx.session._subscription_task_group # type: ignore[attr-defined] + if tg: + tg.start_soon( # type: ignore[union-attr] + subscribe_to_task_updates, + server_task_id, + task_key, + ctx.session, + docket, + ) + + # 
Return task stub + # Tasks MUST begin in "working" status per SEP-1686 final spec (line 381) + return mcp.types.CallToolResult( + content=[], + _meta={ + "modelcontextprotocol.io/task": { + "taskId": server_task_id, + "status": "working", + } + }, + ) + + +async def handle_prompt_as_task( + server: FastMCP, + prompt_name: str, + arguments: dict[str, Any] | None, + task_meta: dict[str, Any], +) -> mcp.types.GetPromptResult: + """Handle prompt execution as background task (SEP-1686). + + Queues the user's actual function to Docket (preserving signature for DI). + + Args: + server: FastMCP server instance + prompt_name: Name of the prompt to execute + arguments: Prompt arguments + task_meta: Task metadata from request (contains ttl) + + Returns: + GetPromptResult: Task stub with task metadata in _meta + """ + # Generate server-side task ID per SEP-1686 final spec (line 375-377) + # Server MUST generate task IDs, clients no longer provide them + server_task_id = str(uuid.uuid4()) + + # Record creation timestamp per SEP-1686 final spec (line 430) + # Format as ISO 8601 / RFC 3339 timestamp + created_at = datetime.now(timezone.utc).isoformat() + + # Get session ID and Docket + ctx = get_context() + session_id = ctx.session_id + + docket = _current_docket.get() + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket. 
Set FASTMCP_ENABLE_DOCKET=true", + ) + ) + + # Build full task key with embedded metadata + task_key = build_task_key(session_id, server_task_id, "prompt", prompt_name) + + # Get the prompt + prompt = await server.get_prompt(prompt_name) + + # Store task key mapping and creation timestamp in Redis for protocol handlers + redis_key = f"fastmcp:task:{session_id}:{server_task_id}" + created_at_key = f"fastmcp:task:{session_id}:{server_task_id}:created_at" + ttl_seconds = int( + docket.execution_ttl.total_seconds() + TASK_MAPPING_TTL_BUFFER_SECONDS + ) + async with docket.redis() as redis: + await redis.set(redis_key, task_key, ex=ttl_seconds) + await redis.set(created_at_key, created_at, ex=ttl_seconds) + + # Send notifications/tasks/created per SEP-1686 (mandatory) + # Send BEFORE queuing to avoid race where task completes before notification + notification = mcp.types.JSONRPCNotification( + jsonrpc="2.0", + method="notifications/tasks/created", + params={}, + _meta={ + "modelcontextprotocol.io/related-task": { + "taskId": server_task_id, + } + }, + ) + with suppress(Exception): + await ctx.session.send_notification(notification) # type: ignore[arg-type] + + # Queue function to Docket (result storage via execution_ttl) + await docket.add( + prompt.fn, # type: ignore[attr-defined] + key=task_key, + )(**(arguments or {})) + + # Spawn subscription task to send status notifications (SEP-1686 optional feature) + from fastmcp.server.tasks.subscriptions import subscribe_to_task_updates + + # Start subscription in session's task group (persists for connection lifetime) + if hasattr(ctx.session, "_subscription_task_group"): + tg = ctx.session._subscription_task_group # type: ignore[attr-defined] + if tg: + tg.start_soon( # type: ignore[union-attr] + subscribe_to_task_updates, + server_task_id, + task_key, + ctx.session, + docket, + ) + + # Return task stub + # Tasks MUST begin in "working" status per SEP-1686 final spec (line 381) + return mcp.types.GetPromptResult( + 
description="", + messages=[], + _meta={ + "modelcontextprotocol.io/task": { + "taskId": server_task_id, + "status": "working", + } + }, + ) + + +async def handle_resource_as_task( + server: FastMCP, + uri: str, + resource, # Resource or ResourceTemplate + task_meta: dict[str, Any], +) -> mcp.types.ServerResult: + """Handle resource read as background task (SEP-1686). + + Queues the user's actual function to Docket. + + Args: + server: FastMCP server instance + uri: Resource URI + resource: Resource or ResourceTemplate object + task_meta: Task metadata from request (contains ttl) + + Returns: + ServerResult with ReadResourceResult stub + """ + # Generate server-side task ID per SEP-1686 final spec (line 375-377) + # Server MUST generate task IDs, clients no longer provide them + server_task_id = str(uuid.uuid4()) + + # Record creation timestamp per SEP-1686 final spec (line 430) + # Format as ISO 8601 / RFC 3339 timestamp + created_at = datetime.now(timezone.utc).isoformat() + + # Get session ID and Docket + ctx = get_context() + session_id = ctx.session_id + + docket = _current_docket.get() + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket", + ) + ) + + # Build full task key with embedded metadata (use original URI) + task_key = build_task_key(session_id, server_task_id, "resource", str(uri)) + + # Store task key mapping and creation timestamp in Redis for protocol handlers + redis_key = f"fastmcp:task:{session_id}:{server_task_id}" + created_at_key = f"fastmcp:task:{session_id}:{server_task_id}:created_at" + ttl_seconds = int( + docket.execution_ttl.total_seconds() + TASK_MAPPING_TTL_BUFFER_SECONDS + ) + async with docket.redis() as redis: + await redis.set(redis_key, task_key, ex=ttl_seconds) + await redis.set(created_at_key, created_at, ex=ttl_seconds) + + # Send notifications/tasks/created per SEP-1686 (mandatory) + # Send BEFORE queuing to avoid race where task completes before notification + 
notification = mcp.types.JSONRPCNotification( + jsonrpc="2.0", + method="notifications/tasks/created", + params={}, + _meta={ + "modelcontextprotocol.io/related-task": { + "taskId": server_task_id, + } + }, + ) + with suppress(Exception): + await ctx.session.send_notification(notification) # type: ignore[arg-type] + + # Queue function to Docket (result storage via execution_ttl) + await docket.add( + resource.fn, # type: ignore[attr-defined] + key=task_key, + )() + + # Spawn subscription task to send status notifications (SEP-1686 optional feature) + from fastmcp.server.tasks.subscriptions import subscribe_to_task_updates + + # Start subscription in session's task group (persists for connection lifetime) + if hasattr(ctx.session, "_subscription_task_group"): + tg = ctx.session._subscription_task_group # type: ignore[attr-defined] + if tg: + tg.start_soon( # type: ignore[union-attr] + subscribe_to_task_updates, + server_task_id, + task_key, + ctx.session, + docket, + ) + + # Return task stub + # Tasks MUST begin in "working" status per SEP-1686 final spec (line 381) + return mcp.types.ServerResult( + mcp.types.ReadResourceResult( + contents=[], + _meta={ + "modelcontextprotocol.io/task": { + "taskId": server_task_id, + "status": "working", + } + }, + ) + ) diff --git a/src/fastmcp/server/tasks/keys.py b/src/fastmcp/server/tasks/keys.py new file mode 100644 index 0000000000..9931eb7699 --- /dev/null +++ b/src/fastmcp/server/tasks/keys.py @@ -0,0 +1,93 @@ +"""Task key management for SEP-1686 background tasks. 
+ +Task keys encode security scoping and metadata in the Docket key format: + {session_id}:{client_task_id}:{task_type}:{component_identifier} + +This format provides: +- Session-based security scoping (prevents cross-session access) +- Task type identification (tool/prompt/resource) +- Component identification (name or URI for result conversion) +""" + +from urllib.parse import quote, unquote + + +def build_task_key( + session_id: str, + client_task_id: str, + task_type: str, + component_identifier: str, +) -> str: + """Build Docket task key with embedded metadata. + + Format: {session_id}:{client_task_id}:{task_type}:{component_identifier} + + The component_identifier is URI-encoded to handle special characters (colons, slashes, etc.). + + Args: + session_id: Session ID for security scoping + client_task_id: Client-provided task ID + task_type: Type of task ("tool", "prompt", "resource") + component_identifier: Tool name, prompt name, or resource URI + + Returns: + Encoded task key for Docket + + Examples: + >>> build_task_key("session123", "task456", "tool", "my_tool") + 'session123:task456:tool:my_tool' + + >>> build_task_key("session123", "task456", "resource", "file://data.txt") + 'session123:task456:resource:file%3A%2F%2Fdata.txt' + """ + encoded_identifier = quote(component_identifier, safe="") + return f"{session_id}:{client_task_id}:{task_type}:{encoded_identifier}" + + +def parse_task_key(task_key: str) -> dict[str, str]: + """Parse Docket task key to extract metadata. 
+ + Args: + task_key: Encoded task key from Docket + + Returns: + Dict with keys: session_id, client_task_id, task_type, component_identifier + + Examples: + >>> parse_task_key("session123:task456:tool:my_tool") + {'session_id': 'session123', 'client_task_id': 'task456', + 'task_type': 'tool', 'component_identifier': 'my_tool'} + + >>> parse_task_key("session123:task456:resource:file%3A%2F%2Fdata.txt") + {'session_id': 'session123', 'client_task_id': 'task456', + 'task_type': 'resource', 'component_identifier': 'file://data.txt'} + """ + parts = task_key.split(":", 3) + if len(parts) != 4: + raise ValueError( + f"Invalid task key format: {task_key}. " + f"Expected: {{session_id}}:{{client_task_id}}:{{task_type}}:{{component_identifier}}" + ) + + return { + "session_id": parts[0], + "client_task_id": parts[1], + "task_type": parts[2], + "component_identifier": unquote(parts[3]), + } + + +def get_client_task_id_from_key(task_key: str) -> str: + """Extract just the client task ID from a task key. + + Args: + task_key: Full encoded task key + + Returns: + Client-provided task ID (second segment) + + Example: + >>> get_client_task_id_from_key("session123:task456:tool:my_tool") + 'task456' + """ + return task_key.split(":", 3)[1] diff --git a/src/fastmcp/server/tasks/protocol.py b/src/fastmcp/server/tasks/protocol.py new file mode 100644 index 0000000000..902947862b --- /dev/null +++ b/src/fastmcp/server/tasks/protocol.py @@ -0,0 +1,355 @@ +"""SEP-1686 task protocol handlers. + +Implements MCP task protocol methods: tasks/get, tasks/result, tasks/list, tasks/cancel, tasks/delete. 
+""" + +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from typing import TYPE_CHECKING, Any + +import mcp.types +from docket.execution import ExecutionState +from mcp.shared.exceptions import McpError +from mcp.types import ( + INTERNAL_ERROR, + INVALID_PARAMS, + CancelTaskResult, + ErrorData, + GetTaskResult, + ListTasksResult, +) + +from fastmcp.server.tasks.converters import ( + convert_prompt_result, + convert_resource_result, + convert_tool_result, +) +from fastmcp.server.tasks.keys import parse_task_key + +if TYPE_CHECKING: + from fastmcp.server.server import FastMCP + +# Map Docket execution states to MCP task status strings +# Per SEP-1686 final spec (line 381): tasks MUST begin in "working" status +DOCKET_TO_MCP_STATE: dict[ExecutionState, str] = { + ExecutionState.SCHEDULED: "working", # Initial state per spec + ExecutionState.QUEUED: "working", # Initial state per spec + ExecutionState.RUNNING: "working", + ExecutionState.COMPLETED: "completed", + ExecutionState.FAILED: "failed", + ExecutionState.CANCELLED: "cancelled", +} + + +async def tasks_get_handler(server: FastMCP, params: dict[str, Any]) -> GetTaskResult: + """Handle MCP 'tasks/get' request (SEP-1686). 
+ + Args: + server: FastMCP server instance + params: Request params containing taskId + + Returns: + GetTaskResult: Task status response with spec-compliant fields + """ + import fastmcp.server.context + + async with fastmcp.server.context.Context(fastmcp=server) as ctx: + client_task_id = params.get("taskId") + if not client_task_id: + raise McpError( + ErrorData( + code=INVALID_PARAMS, message="Missing required parameter: taskId" + ) + ) + + # Get session ID from Context + session_id = ctx.session_id + + # Get execution from Docket (use instance attribute for cross-task access) + docket = server._docket + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket", + ) + ) + + # Look up full task key and creation timestamp from Redis + redis_key = f"fastmcp:task:{session_id}:{client_task_id}" + created_at_key = f"fastmcp:task:{session_id}:{client_task_id}:created_at" + async with docket.redis() as redis: + task_key_bytes = await redis.get(redis_key) + created_at_bytes = await redis.get(created_at_key) + + task_key = None if task_key_bytes is None else task_key_bytes.decode("utf-8") + created_at = ( + None if created_at_bytes is None else created_at_bytes.decode("utf-8") + ) + + if task_key is None: + # Task not found - raise error per MCP protocol + raise McpError( + ErrorData( + code=INVALID_PARAMS, message=f"Task {client_task_id} not found" + ) + ) + + execution = await docket.get_execution(task_key) + if execution is None: + # Task key exists but no execution - raise error + raise McpError( + ErrorData( + code=INVALID_PARAMS, + message=f"Task {client_task_id} execution not found", + ) + ) + + # Sync state from Redis + await execution.sync() + + # Map Docket state to MCP state + mcp_state = DOCKET_TO_MCP_STATE.get(execution.state, "failed") + + # Build response (use default ttl since we don't track per-task values) + # createdAt is REQUIRED per SEP-1686 final spec (line 430) + # Per spec lines 447-448: 
SHOULD NOT include related-task metadata in tasks/get + error_message = None + status_message = None + + if execution.state == ExecutionState.FAILED: + try: + await execution.get_result(timeout=timedelta(seconds=0)) + except Exception as error: + error_message = str(error) + status_message = f"Task failed: {error_message}" + elif execution.progress and execution.progress.message: + # Extract progress message from Docket if available (spec line 403) + status_message = execution.progress.message + + return GetTaskResult( + taskId=client_task_id, + status=mcp_state, # type: ignore[arg-type] + createdAt=created_at, # type: ignore[arg-type] + lastUpdatedAt=datetime.now(timezone.utc), + ttl=60000, + pollInterval=1000, + statusMessage=status_message, + ) + + +async def tasks_result_handler(server: FastMCP, params: dict[str, Any]) -> Any: + """Handle MCP 'tasks/result' request (SEP-1686). + + Converts raw task return values to MCP types based on task type. + + Args: + server: FastMCP server instance + params: Request params containing taskId + + Returns: + MCP result (CallToolResult, GetPromptResult, or ReadResourceResult) + """ + import fastmcp.server.context + + async with fastmcp.server.context.Context(fastmcp=server) as ctx: + client_task_id = params.get("taskId") + if not client_task_id: + raise McpError( + ErrorData( + code=INVALID_PARAMS, message="Missing required parameter: taskId" + ) + ) + + # Get session ID from Context + session_id = ctx.session_id + + # Get execution from Docket (use instance attribute for cross-task access) + docket = server._docket + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket", + ) + ) + + # Look up full task key from Redis + redis_key = f"fastmcp:task:{session_id}:{client_task_id}" + async with docket.redis() as redis: + task_key_bytes = await redis.get(redis_key) + + task_key = None if task_key_bytes is None else task_key_bytes.decode("utf-8") + + if task_key is None: 
+ raise McpError( + ErrorData( + code=INVALID_PARAMS, + message=f"Invalid taskId: {client_task_id} not found", + ) + ) + + execution = await docket.get_execution(task_key) + if execution is None: + raise McpError( + ErrorData( + code=INVALID_PARAMS, + message=f"Invalid taskId: {client_task_id} not found", + ) + ) + + # Sync state from Redis + await execution.sync() + + # Check if completed + if execution.state not in (ExecutionState.COMPLETED, ExecutionState.FAILED): + mcp_state = DOCKET_TO_MCP_STATE.get(execution.state, "failed") + raise McpError( + ErrorData( + code=INVALID_PARAMS, + message=f"Task not completed yet (current state: {mcp_state})", + ) + ) + + # Get result from Docket + try: + raw_value = await execution.get_result(timeout=timedelta(seconds=0)) + except Exception as error: + # Task failed - return error result + return mcp.types.CallToolResult( + content=[mcp.types.TextContent(type="text", text=str(error))], + isError=True, + _meta={ + "modelcontextprotocol.io/related-task": { + "taskId": client_task_id, + } + }, + ) + + # Parse task key to get type and component info + key_parts = parse_task_key(task_key) + task_type = key_parts["task_type"] + + # Convert based on task type (pass client_task_id for metadata) + if task_type == "tool": + return await convert_tool_result( + server, raw_value, key_parts["component_identifier"], client_task_id + ) + elif task_type == "prompt": + return await convert_prompt_result( + server, raw_value, key_parts["component_identifier"], client_task_id + ) + elif task_type == "resource": + return await convert_resource_result( + server, raw_value, key_parts["component_identifier"], client_task_id + ) + else: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message=f"Internal error: Unknown task type: {task_type}", + ) + ) + + +async def tasks_list_handler( + server: FastMCP, params: dict[str, Any] +) -> ListTasksResult: + """Handle MCP 'tasks/list' request (SEP-1686). 
+ + Note: With client-side tracking, this returns minimal info. + + Args: + server: FastMCP server instance + params: Request params (cursor, limit) + + Returns: + ListTasksResult: Response with tasks list and pagination + """ + # Return empty list - client tracks tasks locally + return ListTasksResult(tasks=[], nextCursor=None) + + +async def tasks_cancel_handler( + server: FastMCP, params: dict[str, Any] +) -> CancelTaskResult: + """Handle MCP 'tasks/cancel' request (SEP-1686). + + Cancels a running task, transitioning it to cancelled state. + + Args: + server: FastMCP server instance + params: Request params containing taskId + + Returns: + CancelTaskResult: Task status response showing cancelled state + """ + import fastmcp.server.context + + async with fastmcp.server.context.Context(fastmcp=server) as ctx: + client_task_id = params.get("taskId") + if not client_task_id: + raise McpError( + ErrorData( + code=INVALID_PARAMS, message="Missing required parameter: taskId" + ) + ) + + # Get session ID from Context + session_id = ctx.session_id + + docket = server._docket + if docket is None: + raise McpError( + ErrorData( + code=INTERNAL_ERROR, + message="Background tasks require Docket", + ) + ) + + # Look up full task key and creation timestamp from Redis + redis_key = f"fastmcp:task:{session_id}:{client_task_id}" + created_at_key = f"fastmcp:task:{session_id}:{client_task_id}:created_at" + async with docket.redis() as redis: + task_key_bytes = await redis.get(redis_key) + created_at_bytes = await redis.get(created_at_key) + + task_key = None if task_key_bytes is None else task_key_bytes.decode("utf-8") + created_at = ( + None if created_at_bytes is None else created_at_bytes.decode("utf-8") + ) + + if task_key is None: + raise McpError( + ErrorData( + code=INVALID_PARAMS, + message=f"Invalid taskId: {client_task_id} not found", + ) + ) + + # Check if task exists + execution = await docket.get_execution(task_key) + if execution is None: + raise McpError( + 
ErrorData( + code=INVALID_PARAMS, + message=f"Invalid taskId: {client_task_id} not found", + ) + ) + + # Cancel via Docket (now sets CANCELLED state natively) + await docket.cancel(task_key) + + # Return task status with cancelled state + # createdAt is REQUIRED per SEP-1686 final spec (line 430) + # Per spec lines 447-448: SHOULD NOT include related-task metadata in tasks/cancel + return CancelTaskResult( + taskId=client_task_id, + status="cancelled", + createdAt=created_at or datetime.now(timezone.utc).isoformat(), + lastUpdatedAt=datetime.now(timezone.utc), + ttl=60_000, + pollInterval=1000, + statusMessage="Task cancelled", + ) diff --git a/src/fastmcp/server/tasks/subscriptions.py b/src/fastmcp/server/tasks/subscriptions.py new file mode 100644 index 0000000000..dae18675b5 --- /dev/null +++ b/src/fastmcp/server/tasks/subscriptions.py @@ -0,0 +1,205 @@ +"""Task subscription helpers for sending MCP notifications (SEP-1686). + +Subscribes to Docket execution state changes and sends notifications/tasks/status +to clients when their tasks change state. +""" + +from __future__ import annotations + +from contextlib import suppress +from datetime import datetime, timezone +from typing import TYPE_CHECKING + +from docket.execution import ExecutionState +from mcp.types import TaskStatusNotification, TaskStatusNotificationParams + +from fastmcp.server.tasks.protocol import DOCKET_TO_MCP_STATE +from fastmcp.utilities.logging import get_logger + +if TYPE_CHECKING: + from docket import Docket + from docket.execution import Execution + from mcp.server.session import ServerSession + +logger = get_logger(__name__) + + +async def subscribe_to_task_updates( + task_id: str, + task_key: str, + session: ServerSession, + docket: Docket, +) -> None: + """Subscribe to Docket execution events and send MCP notifications. + + Per SEP-1686 lines 436-444, servers MAY send notifications/tasks/status + when task state changes. 
This is an optional optimization that reduces + client polling frequency. + + Args: + task_id: Client-visible task ID (server-generated UUID) + task_key: Internal Docket execution key (includes session, type, component) + session: MCP ServerSession for sending notifications + docket: Docket instance for subscribing to execution events + """ + try: + execution = await docket.get_execution(task_key) + if execution is None: + logger.warning(f"No execution found for task {task_id}") + return + + # Subscribe to state and progress events from Docket + async for event in execution.subscribe(): + if event["type"] == "state": + # Send notifications/tasks/status when state changes + await _send_status_notification( + session=session, + task_id=task_id, + task_key=task_key, + docket=docket, + state=event["state"], # type: ignore[typeddict-item] + ) + elif event["type"] == "progress": + # Send notification when progress message changes + await _send_progress_notification( + session=session, + task_id=task_id, + task_key=task_key, + docket=docket, + execution=execution, + ) + + except Exception as e: + logger.warning(f"Subscription task failed for {task_id}: {e}", exc_info=True) + + +async def _send_status_notification( + session: ServerSession, + task_id: str, + task_key: str, + docket: Docket, + state: ExecutionState, +) -> None: + """Send notifications/tasks/status to client. + + Per SEP-1686 line 454: notification SHOULD NOT include related-task metadata + (taskId is already in params). 
+ + Args: + session: MCP ServerSession + task_id: Client-visible task ID + task_key: Internal task key (for metadata lookup) + docket: Docket instance + state: Docket execution state (enum) + """ + # Map Docket state to MCP status + mcp_status = DOCKET_TO_MCP_STATE.get(state, "failed") + + # Extract session_id from task_key for Redis lookup + from fastmcp.server.tasks.keys import parse_task_key + + key_parts = parse_task_key(task_key) + session_id = key_parts["session_id"] + + # Retrieve createdAt timestamp from Redis + created_at_key = f"fastmcp:task:{session_id}:{task_id}:created_at" + async with docket.redis() as redis: + created_at_bytes = await redis.get(created_at_key) + + created_at = ( + created_at_bytes.decode("utf-8") + if created_at_bytes + else datetime.now(timezone.utc).isoformat() + ) + + # Build status message + status_message = None + if state == ExecutionState.COMPLETED: + status_message = "Task completed successfully" + elif state == ExecutionState.FAILED: + status_message = "Task failed" + elif state == ExecutionState.CANCELLED: + status_message = "Task cancelled" + + params_dict = { + "taskId": task_id, + "status": mcp_status, + "createdAt": created_at, + "lastUpdatedAt": datetime.now(timezone.utc).isoformat(), + "ttl": 60000, + "pollInterval": 1000, + } + + if status_message: + params_dict["statusMessage"] = status_message + + # Create notification (no related-task metadata per spec line 454) + notification = TaskStatusNotification( + params=TaskStatusNotificationParams.model_validate(params_dict), + ) + + # Send notification (don't let failures break the subscription) + with suppress(Exception): + await session.send_notification(notification) # type: ignore[arg-type] + + +async def _send_progress_notification( + session: ServerSession, + task_id: str, + task_key: str, + docket: Docket, + execution: Execution, +) -> None: + """Send notifications/tasks/status when progress updates. 
+ + Args: + session: MCP ServerSession + task_id: Client-visible task ID + task_key: Internal task key + docket: Docket instance + execution: Execution object with current progress + """ + # Sync execution to get latest progress + await execution.sync() + + # Only send if there's a progress message + if not execution.progress or not execution.progress.message: + return + + # Map Docket state to MCP status + mcp_status = DOCKET_TO_MCP_STATE.get(execution.state, "failed") + + # Extract session_id from task_key for Redis lookup + from fastmcp.server.tasks.keys import parse_task_key + + key_parts = parse_task_key(task_key) + session_id = key_parts["session_id"] + + # Retrieve createdAt timestamp from Redis + created_at_key = f"fastmcp:task:{session_id}:{task_id}:created_at" + async with docket.redis() as redis: + created_at_bytes = await redis.get(created_at_key) + + created_at = ( + created_at_bytes.decode("utf-8") + if created_at_bytes + else datetime.now(timezone.utc).isoformat() + ) + + params_dict = { + "taskId": task_id, + "status": mcp_status, + "createdAt": created_at, + "lastUpdatedAt": datetime.now(timezone.utc).isoformat(), + "ttl": 60000, + "pollInterval": 1000, + "statusMessage": execution.progress.message, + } + + # Create and send notification + notification = TaskStatusNotification( + params=TaskStatusNotificationParams.model_validate(params_dict), + ) + + with suppress(Exception): + await session.send_notification(notification) # type: ignore[arg-type] diff --git a/src/fastmcp/settings.py b/src/fastmcp/settings.py index 69661c7fa8..9ba30e4371 100644 --- a/src/fastmcp/settings.py +++ b/src/fastmcp/settings.py @@ -3,6 +3,7 @@ import inspect import os import warnings +from datetime import timedelta from pathlib import Path from typing import TYPE_CHECKING, Annotated, Any, Literal @@ -30,6 +31,94 @@ from fastmcp.server.auth.auth import AuthProvider +class DocketSettings(BaseSettings): + """Docket worker configuration.""" + + model_config = 
SettingsConfigDict( + env_prefix="FASTMCP_DOCKET_", + extra="ignore", + ) + + name: Annotated[ + str, + Field( + description=inspect.cleandoc( + """ + Name for the Docket queue. All servers/workers sharing the same name + and backend URL will share a task queue. + """ + ), + ), + ] = "fastmcp" + + url: Annotated[ + str, + Field( + description=inspect.cleandoc( + """ + URL for the Docket backend. Supports: + - memory:// - In-memory backend (single process only) + - redis://host:port/db - Redis/Valkey backend (distributed, multi-process) + + Example: redis://localhost:6379/0 + + Default is memory:// for single-process scenarios. Use Redis or Valkey + when coordinating tasks across multiple processes (e.g., additional + workers via the fastmcp tasks CLI). + """ + ), + ), + ] = "memory://" + + worker_name: Annotated[ + str | None, + Field( + description=inspect.cleandoc( + """ + Name for the Docket worker. If None, Docket will auto-generate + a unique worker name. + """ + ), + ), + ] = None + + concurrency: Annotated[ + int, + Field( + description=inspect.cleandoc( + """ + Maximum number of tasks the worker can process concurrently. + """ + ), + ), + ] = 10 + + redelivery_timeout: Annotated[ + timedelta, + Field( + description=inspect.cleandoc( + """ + Task redelivery timeout. If a worker doesn't complete + a task within this time, the task will be redelivered to another + worker. + """ + ), + ), + ] = timedelta(seconds=300) + + reconnection_delay: Annotated[ + timedelta, + Field( + description=inspect.cleandoc( + """ + Delay between reconnection attempts when the worker + loses connection to the Docket backend. 
+ """ + ), + ), + ] = timedelta(seconds=5) + + class ExperimentalSettings(BaseSettings): model_config = SettingsConfigDict( env_prefix="FASTMCP_EXPERIMENTAL_", @@ -120,6 +209,40 @@ def normalize_log_level(cls, v): experimental: ExperimentalSettings = ExperimentalSettings() + # Docket/Tasks settings + enable_docket: Annotated[ + bool, + Field( + description=inspect.cleandoc( + """ + Enable Docket support for background task execution. + When enabled, FastMCP will create a Docket instance with a Worker + available via dependency injection. This allows tools, prompts, and + resources to schedule background work using CurrentDocket(). + """ + ), + ), + ] = False + + enable_tasks: Annotated[ + bool, + Field( + description=inspect.cleandoc( + """ + Enable MCP SEP-1686 task protocol support for background execution. + + Server-side: Requires enable_docket=True (validated at server startup). + Advertises task capabilities and handles task/* protocol methods. + + Client-side: Advertises task capability to servers. No Docket needed + on client side. 
+ """ + ), + ), + ] = False + + docket: DocketSettings = DocketSettings() + enable_rich_tracebacks: Annotated[ bool, Field( diff --git a/src/fastmcp/tools/tool.py b/src/fastmcp/tools/tool.py index 1f3d88ca34..31f9636d62 100644 --- a/src/fastmcp/tools/tool.py +++ b/src/fastmcp/tools/tool.py @@ -16,13 +16,20 @@ import mcp.types import pydantic_core from mcp.shared.tool_name_validation import validate_and_warn_tool_name -from mcp.types import CallToolResult, ContentBlock, Icon, TextContent, ToolAnnotations +from mcp.types import ( + CallToolResult, + ContentBlock, + Icon, + TextContent, + ToolAnnotations, + ToolExecution, +) from mcp.types import Tool as MCPTool from pydantic import Field, PydanticSchemaGenerationError, model_validator from typing_extensions import TypeVar import fastmcp -from fastmcp.server.dependencies import get_context +from fastmcp.server.dependencies import get_context, without_injected_parameters from fastmcp.utilities.components import FastMCPComponent from fastmcp.utilities.json_schema import compress_schema from fastmcp.utilities.logging import get_logger @@ -33,7 +40,6 @@ NotSet, NotSetT, create_function_without_params, - find_kwarg_by_type, get_cached_typeadapter, replace_type, ) @@ -130,6 +136,12 @@ class Tool(FastMCPComponent): ToolResultSerializerType | None, Field(description="Optional custom serializer for tool results"), ] = None + task: Annotated[ + bool, + Field( + description="Whether this tool supports background task execution (SEP-1686)" + ), + ] = False @model_validator(mode="after") def _validate_tool_name(self) -> Tool: @@ -167,6 +179,15 @@ def to_mcp_tool( elif self.annotations and self.annotations.title: title = self.annotations.title + # Auto-populate task execution mode based on tool.task flag if not explicitly set + # Per SEP-1686: tools declare task support via execution.task + # task values: "never" (no task support), "optional" (supports both), "always" (requires task) + annotations = self.annotations + execution = 
None + if self.task: + # Tool supports background execution - use "optional" to allow both immediate and task execution + execution = ToolExecution(task="optional") + return MCPTool( name=overrides.get("name", self.name), title=overrides.get("title", title), @@ -174,7 +195,8 @@ def to_mcp_tool( inputSchema=overrides.get("inputSchema", self.parameters), outputSchema=overrides.get("outputSchema", self.output_schema), icons=overrides.get("icons", self.icons), - annotations=overrides.get("annotations", self.annotations), + annotations=overrides.get("annotations", annotations), + execution=overrides.get("execution", execution), _meta=overrides.get( "_meta", self.get_meta(include_fastmcp_meta=include_fastmcp_meta) ), @@ -194,6 +216,7 @@ def from_function( serializer: ToolResultSerializerType | None = None, meta: dict[str, Any] | None = None, enabled: bool | None = None, + task: bool | None = None, ) -> FunctionTool: """Create a Tool from a function.""" return FunctionTool.from_function( @@ -209,6 +232,7 @@ def from_function( serializer=serializer, meta=meta, enabled=enabled, + task=task, ) async def run(self, arguments: dict[str, Any]) -> ToolResult: @@ -276,6 +300,7 @@ def from_function( serializer: ToolResultSerializerType | None = None, meta: dict[str, Any] | None = None, enabled: bool | None = None, + task: bool | None = None, ) -> FunctionTool: """Create a Tool from a function.""" if exclude_args and fastmcp.settings.deprecation_warnings: @@ -322,21 +347,14 @@ def from_function( serializer=serializer, meta=meta, enabled=enabled if enabled is not None else True, + task=task if task is not None else False, ) async def run(self, arguments: dict[str, Any]) -> ToolResult: """Run the tool with arguments.""" - from fastmcp.server.context import Context - - arguments = arguments.copy() - - context_kwarg = find_kwarg_by_type(self.fn, kwarg_type=Context) - if context_kwarg and context_kwarg not in arguments: - arguments[context_kwarg] = get_context() - - type_adapter = 
get_cached_typeadapter(self.fn) + wrapper_fn = without_injected_parameters(self.fn) + type_adapter = get_cached_typeadapter(wrapper_fn) result = type_adapter.validate_python(arguments) - if inspect.isawaitable(result): result = await result @@ -406,8 +424,6 @@ def from_function( validate: bool = True, wrap_non_object_output_schema: bool = True, ) -> ParsedFunction: - from fastmcp.server.context import Context - if validate: sig = inspect.signature(fn) # Reject functions with *args or **kwargs @@ -443,22 +459,19 @@ def from_function( if isinstance(fn, staticmethod): fn = fn.__func__ - prune_params: list[str] = [] - context_kwarg = find_kwarg_by_type(fn, kwarg_type=Context) - if context_kwarg: - prune_params.append(context_kwarg) - if exclude_args: - prune_params.extend(exclude_args) + # Handle injected parameters (Context, Docket dependencies) + wrapper_fn = without_injected_parameters(fn) - # Create a function without excluded parameters in annotations - # This prevents Pydantic from trying to serialize non-serializable types - # before we can exclude them in compress_schema - fn_for_typeadapter = fn - if prune_params: - fn_for_typeadapter = create_function_without_params(fn, prune_params) + # Also handle exclude_args with non-serializable types (issue #2431) + # This must happen before Pydantic tries to serialize the parameters + if exclude_args: + wrapper_fn = create_function_without_params(wrapper_fn, list(exclude_args)) - input_type_adapter = get_cached_typeadapter(fn_for_typeadapter) + input_type_adapter = get_cached_typeadapter(wrapper_fn) input_schema = input_type_adapter.json_schema() + + # Compress and handle exclude_args + prune_params = list(exclude_args) if exclude_args else None input_schema = compress_schema( input_schema, prune_params=prune_params, prune_titles=True ) diff --git a/src/fastmcp/utilities/tests.py b/src/fastmcp/utilities/tests.py index 1b3159aadf..45db76ab7f 100644 --- a/src/fastmcp/utilities/tests.py +++ 
b/src/fastmcp/utilities/tests.py @@ -12,6 +12,7 @@ import httpx import uvicorn +from pytest import LogCaptureFixture from fastmcp import settings from fastmcp.client.auth.oauth import OAuth @@ -221,7 +222,9 @@ async def test_greet(server: str): @contextmanager -def caplog_for_fastmcp(caplog): +def caplog_for_fastmcp( + caplog: LogCaptureFixture, +) -> Generator[LogCaptureFixture, None, None]: """Context manager to capture logs from FastMCP loggers even when propagation is disabled.""" caplog.clear() logger = logging.getLogger("fastmcp") diff --git a/tests/cli/test_tasks.py b/tests/cli/test_tasks.py new file mode 100644 index 0000000000..d0b7994af5 --- /dev/null +++ b/tests/cli/test_tasks.py @@ -0,0 +1,65 @@ +"""Tests for the fastmcp tasks CLI.""" + +import pytest + +from fastmcp.cli.tasks import check_docket_enabled, tasks_app +from fastmcp.utilities.tests import temporary_settings + + +class TestCheckDocketEnabled: + """Test the Docket enabled checker function.""" + + def test_succeeds_when_docket_enabled_with_redis(self): + """Test that it succeeds when Docket is enabled with Redis.""" + with temporary_settings( + enable_docket=True, + docket__url="redis://localhost:6379/0", + ): + check_docket_enabled() + + def test_exits_when_docket_not_enabled(self): + """Test that it exits with helpful error when Docket not enabled.""" + with temporary_settings(enable_docket=False): + with pytest.raises(SystemExit) as exc_info: + check_docket_enabled() + + assert isinstance(exc_info.value, SystemExit) + assert exc_info.value.code == 1 + + def test_exits_with_helpful_error_for_memory_url(self): + """Test that it exits with helpful error for memory:// URLs.""" + with temporary_settings( + enable_docket=True, + docket__url="memory://test-123", + ): + with pytest.raises(SystemExit) as exc_info: + check_docket_enabled() + + assert isinstance(exc_info.value, SystemExit) + assert exc_info.value.code == 1 + + +class TestWorkerCommand: + """Test the worker command.""" + + def 
test_worker_command_parsing(self): + """Test that worker command parses arguments correctly.""" + command, bound, _ = tasks_app.parse_args(["worker", "server.py"]) + assert command.__name__ == "worker" # type: ignore[attr-defined] + assert bound.arguments["server_spec"] == "server.py" + + +class TestTasksAppIntegration: + """Test the tasks app integration.""" + + def test_tasks_app_exists(self): + """Test that the tasks app is properly configured.""" + assert "tasks" in tasks_app.name + assert "Docket" in tasks_app.help + + def test_tasks_app_has_commands(self): + """Test that all expected commands are registered.""" + # Just verify the app exists and has the right metadata + # Detailed command testing is done in individual test classes + assert "tasks" in tasks_app.name + assert tasks_app.help diff --git a/tests/client/tasks/conftest.py b/tests/client/tasks/conftest.py new file mode 100644 index 0000000000..4a004de95d --- /dev/null +++ b/tests/client/tasks/conftest.py @@ -0,0 +1,15 @@ +"""Shared fixtures for client task tests.""" + +import pytest + +from fastmcp.utilities.tests import temporary_settings + + +@pytest.fixture(autouse=True) +async def enable_docket_and_tasks(): + """Enable Docket and task protocol support for all client task tests.""" + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + yield diff --git a/tests/client/tasks/test_client_prompt_tasks.py b/tests/client/tasks/test_client_prompt_tasks.py new file mode 100644 index 0000000000..e1154e95f6 --- /dev/null +++ b/tests/client/tasks/test_client_prompt_tasks.py @@ -0,0 +1,103 @@ +""" +Tests for client-side prompt task methods. + +Tests the client's get_prompt_as_task method. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def prompt_server(): + """Create a test server with background-enabled prompts.""" + mcp = FastMCP("prompt-client-test") + + @mcp.prompt(task=True) + async def analysis_prompt(topic: str, style: str = "formal") -> str: + """Generate an analysis prompt.""" + return f"Analyze {topic} in a {style} style" + + @mcp.prompt(task=True) + async def creative_prompt(theme: str) -> str: + """Generate a creative writing prompt.""" + return f"Write a story about {theme}" + + return mcp + + +async def test_get_prompt_as_task_returns_prompt_task(prompt_server): + """get_prompt with task=True returns a PromptTask object.""" + async with Client(prompt_server) as client: + task = await client.get_prompt("analysis_prompt", {"topic": "AI"}, task=True) + + from fastmcp.client.client import PromptTask + + assert isinstance(task, PromptTask) + assert isinstance(task.task_id, str) + + +async def test_prompt_task_server_generated_id(prompt_server): + """get_prompt with task=True gets server-generated task ID.""" + async with Client(prompt_server) as client: + task = await client.get_prompt( + "creative_prompt", + {"theme": "future"}, + task=True, + ) + + # Server should generate a UUID task ID + assert task.task_id is not None + assert isinstance(task.task_id, str) + # UUIDs have hyphens + assert "-" in task.task_id + + +async def test_prompt_task_result_returns_get_prompt_result(prompt_server): + """PromptTask.result() returns GetPromptResult.""" + async with Client(prompt_server) as client: + task = await client.get_prompt( + "analysis_prompt", {"topic": "Robotics", "style": "casual"}, task=True + ) + + # Verify background execution + assert not task.returned_immediately + + # Get result + result = await task.result() + + # Result should be GetPromptResult + assert hasattr(result, "description") + assert hasattr(result, "messages") + # Check the rendered message content, not 
the description + assert len(result.messages) > 0 + assert "Analyze Robotics" in result.messages[0].content.text + + +async def test_prompt_task_await_syntax(prompt_server): + """PromptTask can be awaited directly.""" + async with Client(prompt_server) as client: + task = await client.get_prompt("creative_prompt", {"theme": "ocean"}, task=True) + + # Can await task directly + result = await task + assert "Write a story about ocean" in result.messages[0].content.text + + +async def test_prompt_task_status_and_wait(prompt_server): + """PromptTask supports status() and wait() methods.""" + async with Client(prompt_server) as client: + task = await client.get_prompt("analysis_prompt", {"topic": "Space"}, task=True) + + # Check status + status = await task.status() + assert status.status in ["working", "completed"] + + # Wait for completion + await task.wait(timeout=2.0) + + # Get result + result = await task.result() + assert "Analyze Space" in result.messages[0].content.text diff --git a/tests/client/tasks/test_client_resource_tasks.py b/tests/client/tasks/test_client_resource_tasks.py new file mode 100644 index 0000000000..20d59cd3d1 --- /dev/null +++ b/tests/client/tasks/test_client_resource_tasks.py @@ -0,0 +1,108 @@ +""" +Tests for client-side resource task methods. + +Tests the client's read_resource_as_task method. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def resource_server(): + """Create a test server with background-enabled resources.""" + mcp = FastMCP("resource-client-test") + + @mcp.resource("file://document.txt", task=True) + async def document() -> str: + """A document resource.""" + return "Document content here" + + @mcp.resource("file://data/{id}.json", task=True) + async def data_file(id: str) -> str: + """A parameterized data resource.""" + return f'{{"id": "{id}", "value": 42}}' + + return mcp + + +async def test_read_resource_as_task_returns_resource_task(resource_server): + """read_resource with task=True returns a ResourceTask object.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://document.txt", task=True) + + from fastmcp.client.client import ResourceTask + + assert isinstance(task, ResourceTask) + assert isinstance(task.task_id, str) + + +async def test_resource_task_server_generated_id(resource_server): + """read_resource with task=True gets server-generated task ID.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://document.txt", task=True) + + # Server should generate a UUID task ID + assert task.task_id is not None + assert isinstance(task.task_id, str) + # UUIDs have hyphens + assert "-" in task.task_id + + +async def test_resource_task_result_returns_read_resource_result(resource_server): + """ResourceTask.result() returns list of ReadResourceContents.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://document.txt", task=True) + + # Verify background execution + assert not task.returned_immediately + + # Get result + result = await task.result() + + # Result should be list of ReadResourceContents + assert isinstance(result, list) + assert len(result) > 0 + assert result[0].text == "Document content here" + + +async def 
test_resource_task_await_syntax(resource_server): + """ResourceTask can be awaited directly.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://document.txt", task=True) + + # Can await task directly + result = await task + assert result[0].text == "Document content here" + + +async def test_resource_template_task(resource_server): + """Resource templates work with task support.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://data/999.json", task=True) + + # Verify background execution + assert not task.returned_immediately + + # Get result + result = await task.result() + assert '"id": "999"' in result[0].text + + +async def test_resource_task_status_and_wait(resource_server): + """ResourceTask supports status() and wait() methods.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://document.txt", task=True) + + # Check status + status = await task.status() + assert status.status in ["working", "completed"] + + # Wait for completion + await task.wait(timeout=2.0) + + # Get result + result = await task.result() + assert "Document content" in result[0].text diff --git a/tests/client/tasks/test_client_task_notifications.py b/tests/client/tasks/test_client_task_notifications.py new file mode 100644 index 0000000000..8fba3aad27 --- /dev/null +++ b/tests/client/tasks/test_client_task_notifications.py @@ -0,0 +1,209 @@ +""" +Tests for client-side handling of notifications/tasks/status (SEP-1686 lines 436-444). + +Verifies that Task objects receive notifications, update their cache, wake up wait() calls, +and invoke user callbacks. 
+""" + +import asyncio +import time + +import pytest +from mcp.types import GetTaskResult + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def task_notification_server(): + """Server that sends task status notifications.""" + mcp = FastMCP("task-notification-test") + + @mcp.tool(task=True) + async def quick_task(value: int) -> int: + """Quick background task.""" + await asyncio.sleep(0.05) + return value * 2 + + @mcp.tool(task=True) + async def slow_task(duration: float = 0.2) -> str: + """Slow background task.""" + await asyncio.sleep(duration) + return "done" + + @mcp.tool(task=True) + async def failing_task() -> str: + """Task that fails.""" + raise ValueError("Intentional failure") + + return mcp + + +async def test_task_receives_status_notification(task_notification_server): + """Task object receives and processes status notifications.""" + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 5}, task=True) + + # Wait for task to complete (notification should arrive) + status = await task.wait(timeout=2.0) + + # Verify task completed + assert status.status == "completed" + + +async def test_status_cache_updated_by_notification(task_notification_server): + """Cached status is updated when notification arrives.""" + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 10}, task=True) + + # Wait for completion (notification should update cache) + await task.wait(timeout=2.0) + + # Status should be cached (no server call needed) + # Call status() twice - should return same cached object + status1 = await task.status() + status2 = await task.status() + + # Should be the exact same object (from cache) + assert status1 is status2 + assert status1.status == "completed" + + +async def test_callback_invoked_on_notification(task_notification_server): + """User callback is invoked when notification arrives.""" + 
callback_invocations = [] + + def status_callback(status: GetTaskResult): + """Sync callback.""" + callback_invocations.append(status) + + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 7}, task=True) + + # Register callback + task.on_status_change(status_callback) + + # Wait for completion + await task.wait(timeout=2.0) + + # Give callbacks a moment to fire + await asyncio.sleep(0.1) + + # Callback should have been invoked at least once + assert len(callback_invocations) > 0 + + # Should have received completed status + completed_statuses = [s for s in callback_invocations if s.status == "completed"] + assert len(completed_statuses) > 0 + + +async def test_async_callback_invoked(task_notification_server): + """Async callback is invoked when notification arrives.""" + callback_invocations = [] + + async def async_status_callback(status: GetTaskResult): + """Async callback.""" + await asyncio.sleep(0.01) # Simulate async work + callback_invocations.append(status) + + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 3}, task=True) + + # Register async callback + task.on_status_change(async_status_callback) + + # Wait for completion + await task.wait(timeout=2.0) + + # Give async callbacks time to complete + await asyncio.sleep(0.2) + + # Async callback should have been invoked + assert len(callback_invocations) > 0 + + +async def test_multiple_callbacks_all_invoked(task_notification_server): + """Multiple callbacks are all invoked.""" + callback1_calls = [] + callback2_calls = [] + + def callback1(status: GetTaskResult): + callback1_calls.append(status.status) + + def callback2(status: GetTaskResult): + callback2_calls.append(status.status) + + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 8}, task=True) + + task.on_status_change(callback1) + task.on_status_change(callback2) + + 
await task.wait(timeout=2.0) + await asyncio.sleep(0.1) + + # Both callbacks should have been invoked + assert len(callback1_calls) > 0 + assert len(callback2_calls) > 0 + + +async def test_callback_error_doesnt_break_notification(task_notification_server): + """Callback errors don't prevent other callbacks from running.""" + callback1_calls = [] + callback2_calls = [] + + def failing_callback(status: GetTaskResult): + callback1_calls.append("called") + raise ValueError("Callback intentionally fails") + + def working_callback(status: GetTaskResult): + callback2_calls.append(status.status) + + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 12}, task=True) + + task.on_status_change(failing_callback) + task.on_status_change(working_callback) + + await task.wait(timeout=2.0) + await asyncio.sleep(0.1) + + # Failing callback was called (and errored) + assert len(callback1_calls) > 0 + + # Working callback should still have been invoked + assert len(callback2_calls) > 0 + + +async def test_wait_wakes_early_on_notification(task_notification_server): + """wait() wakes up immediately when notification arrives, not after poll interval.""" + async with Client(task_notification_server) as client: + task = await client.call_tool("quick_task", {"value": 15}, task=True) + + # Record timing + start = time.time() + status = await task.wait(timeout=5.0) + elapsed = time.time() - start + + # Should complete much faster than the fallback poll interval (500ms) + # With notifications, should be < 200ms for quick task + # Without notifications, would take 500ms+ due to polling + assert elapsed < 1.0 # Very generous bound + assert status.status == "completed" + + +async def test_notification_with_failed_task(task_notification_server): + """Notifications work for failed tasks too.""" + async with Client(task_notification_server) as client: + task = await client.call_tool("failing_task", {}, task=True) + + with 
pytest.raises(Exception): + await task + + # Should have cached the failed status from notification + status = await task.status() + assert status.status == "failed" + assert ( + status.statusMessage is not None + ) # Error details in statusMessage per spec diff --git a/tests/client/tasks/test_client_task_protocol.py b/tests/client/tasks/test_client_task_protocol.py new file mode 100644 index 0000000000..e8b29afd94 --- /dev/null +++ b/tests/client/tasks/test_client_task_protocol.py @@ -0,0 +1,85 @@ +""" +Tests for client-side task protocol. + +Generic protocol tests that use tools as test fixtures. +""" + +import asyncio + +from fastmcp import FastMCP +from fastmcp.client import Client + + +async def test_end_to_end_task_flow(): + """Complete end-to-end flow: submit, poll, retrieve.""" + start_signal = asyncio.Event() + complete_signal = asyncio.Event() + + mcp = FastMCP("protocol-test") + + @mcp.tool(task=True) + async def controlled_tool(message: str) -> str: + """Tool with controlled execution.""" + start_signal.set() + await complete_signal.wait() + return f"Processed: {message}" + + async with Client(mcp) as client: + # Submit task + task = await client.call_tool( + "controlled_tool", {"message": "integration test"}, task=True + ) + + # Wait for execution to start + await asyncio.wait_for(start_signal.wait(), timeout=2.0) + + # Check status while running + status = await task.status() + assert status.status in ["working"] + + # Signal completion + complete_signal.set() + + # Wait for task to finish and retrieve result + result = await task.result() + assert result.data == "Processed: integration test" + + +async def test_multiple_concurrent_tasks(): + """Multiple tasks can run concurrently.""" + mcp = FastMCP("concurrent-test") + + @mcp.tool(task=True) + async def multiply(a: int, b: int) -> int: + return a * b + + async with Client(mcp) as client: + # Submit multiple tasks + tasks = [] + for i in range(5): + task = await client.call_tool("multiply", {"a": i, 
"b": 2}, task=True) + tasks.append((task, i * 2)) + + # Wait for all to complete and verify results + for task, expected in tasks: + result = await task.result() + assert result.data == expected + + +async def test_task_id_auto_generation(): + """Task IDs are auto-generated if not provided.""" + mcp = FastMCP("id-test") + + @mcp.tool(task=True) + async def echo(message: str) -> str: + return f"Echo: {message}" + + async with Client(mcp) as client: + # Submit without custom task ID + task_1 = await client.call_tool("echo", {"message": "first"}, task=True) + task_2 = await client.call_tool("echo", {"message": "second"}, task=True) + + # Should generate different IDs + assert task_1.task_id != task_2.task_id + assert len(task_1.task_id) > 0 + assert len(task_2.task_id) > 0 diff --git a/tests/client/tasks/test_client_tool_tasks.py b/tests/client/tasks/test_client_tool_tasks.py new file mode 100644 index 0000000000..33dc1ce9a9 --- /dev/null +++ b/tests/client/tasks/test_client_tool_tasks.py @@ -0,0 +1,88 @@ +""" +Tests for client-side tool task methods. + +Tests the client's tool-specific task functionality, parallel to +test_client_prompt_tasks.py and test_client_resource_tasks.py. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def tool_task_server(): + """Create a test server with task-enabled tools.""" + mcp = FastMCP("tool-task-test") + + @mcp.tool(task=True) + async def echo(message: str) -> str: + """Echo back the message.""" + return f"Echo: {message}" + + @mcp.tool(task=True) + async def multiply(a: int, b: int) -> int: + """Multiply two numbers.""" + return a * b + + return mcp + + +async def test_call_tool_as_task_returns_tool_task(tool_task_server): + """call_tool with task=True returns a ToolTask object.""" + async with Client(tool_task_server) as client: + task = await client.call_tool("echo", {"message": "hello"}, task=True) + + from fastmcp.client.client import ToolTask + + assert isinstance(task, ToolTask) + assert isinstance(task.task_id, str) + assert len(task.task_id) > 0 + + +async def test_tool_task_server_generated_id(tool_task_server): + """call_tool with task=True gets server-generated task ID.""" + async with Client(tool_task_server) as client: + task = await client.call_tool("echo", {"message": "test"}, task=True) + + # Server should generate a UUID task ID + assert task.task_id is not None + assert isinstance(task.task_id, str) + # UUIDs have hyphens + assert "-" in task.task_id + + +async def test_tool_task_result_returns_call_tool_result(tool_task_server): + """ToolTask.result() returns CallToolResult with tool data.""" + async with Client(tool_task_server) as client: + task = await client.call_tool("multiply", {"a": 6, "b": 7}, task=True) + assert not task.returned_immediately + + result = await task.result() + assert result.data == 42 + + +async def test_tool_task_await_syntax(tool_task_server): + """Tool tasks can be awaited directly to get result.""" + async with Client(tool_task_server) as client: + task = await client.call_tool("multiply", {"a": 7, "b": 6}, task=True) + + # Can await task directly (syntactic sugar for task.result()) + result 
= await task + assert result.data == 42 + + +async def test_tool_task_status_and_wait(tool_task_server): + """ToolTask.status() returns GetTaskResult.""" + async with Client(tool_task_server) as client: + task = await client.call_tool("echo", {"message": "test"}, task=True) + + status = await task.status() + assert status.taskId == task.task_id + assert status.status in ["working", "completed"] + + # Wait for completion + await task.wait(timeout=2.0) + final_status = await task.status() + assert final_status.status == "completed" diff --git a/tests/client/tasks/test_task_context_validation.py b/tests/client/tasks/test_task_context_validation.py new file mode 100644 index 0000000000..ea236f2866 --- /dev/null +++ b/tests/client/tasks/test_task_context_validation.py @@ -0,0 +1,210 @@ +""" +Tests for Task client context validation. + +Verifies that Task methods properly validate client context and that +cached results remain accessible outside context. +""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def task_server(): + """Create a test server with background tasks.""" + mcp = FastMCP("context-test-server") + + @mcp.tool(task=True) + async def background_tool(value: str) -> str: + """Tool that runs in background.""" + return f"Result: {value}" + + @mcp.prompt(task=True) + async def background_prompt(topic: str) -> str: + """Prompt that runs in background.""" + return f"Prompt about {topic}" + + @mcp.resource("file://background.txt", task=True) + async def background_resource() -> str: + """Resource that runs in background.""" + return "Background resource content" + + return mcp + + +async def test_task_status_outside_context_raises(task_server): + """Calling task.status() outside client context raises error.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + # Now outside 
context + + with pytest.raises(RuntimeError, match="outside client context"): + await task.status() + + +async def test_task_result_outside_context_raises(task_server): + """Calling task.result() outside context raises error.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + # Now outside context + + with pytest.raises(RuntimeError, match="outside client context"): + await task.result() + + +async def test_task_wait_outside_context_raises(task_server): + """Calling task.wait() outside context raises error.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + # Now outside context + + with pytest.raises(RuntimeError, match="outside client context"): + await task.wait() + + +async def test_task_cancel_outside_context_raises(task_server): + """Calling task.cancel() outside context raises error.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + # Now outside context + + with pytest.raises(RuntimeError, match="outside client context"): + await task.cancel() + + +async def test_cached_tool_task_accessible_outside_context(task_server): + """Tool tasks with cached results work outside context.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + + # Get result once to cache it + result1 = await task.result() + assert result1.data == "Result: test" + # Now outside context + + # Should work because result is cached + result2 = await task.result() + assert result2 is result1 # Same object + assert result2.data == "Result: test" + + +async def 
test_cached_prompt_task_accessible_outside_context(task_server): + """Prompt tasks with cached results work outside context.""" + task = None + async with Client(task_server) as client: + task = await client.get_prompt( + "background_prompt", {"topic": "test"}, task=True + ) + assert not task.returned_immediately + + # Get result once to cache it + result1 = await task.result() + assert result1.description == "Prompt that runs in background." + # Now outside context + + # Should work because result is cached + result2 = await task.result() + assert result2 is result1 # Same object + assert result2.description == "Prompt that runs in background." + + +async def test_cached_resource_task_accessible_outside_context(task_server): + """Resource tasks with cached results work outside context.""" + task = None + async with Client(task_server) as client: + task = await client.read_resource("file://background.txt", task=True) + assert not task.returned_immediately + + # Get result once to cache it + result1 = await task.result() + assert len(result1) > 0 + # Now outside context + + # Should work because result is cached + result2 = await task.result() + assert result2 is result1 # Same object + + +async def test_uncached_status_outside_context_raises(task_server): + """Even after caching result, status() still requires client context.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + + # Cache the result + await task.result() + # Now outside context + + # result() works (cached) + result = await task.result() + assert result.data == "Result: test" + + # But status() still needs client connection + with pytest.raises(RuntimeError, match="outside client context"): + await task.status() + + +async def test_task_await_syntax_outside_context_raises(task_server): + """Using await task syntax outside context raises error for background tasks.""" + 
task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + assert not task.returned_immediately + # Now outside context + + with pytest.raises(RuntimeError, match="outside client context"): + await task # Same as await task.result() + + +async def test_task_await_syntax_works_for_cached_results(task_server): + """Using await task syntax works outside context when result is cached.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + result1 = await task # Cache it + # Now outside context + + result2 = await task # Should work (cached) + assert result2 is result1 + assert result2.data == "Result: test" + + +async def test_multiple_result_calls_return_same_cached_object(task_server): + """Multiple result() calls return the same cached object.""" + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + + result1 = await task.result() + result2 = await task.result() + result3 = await task.result() + + # Should all be the same object (cached) + assert result1 is result2 + assert result2 is result3 + + +async def test_background_task_properties_accessible_outside_context(task_server): + """Background task properties like task_id accessible outside context.""" + task = None + async with Client(task_server) as client: + task = await client.call_tool("background_tool", {"value": "test"}, task=True) + task_id_inside = task.task_id + assert not task.returned_immediately + # Now outside context + + # Properties should still be accessible (they don't need client connection) + assert task.task_id == task_id_inside + assert task.returned_immediately is False diff --git a/tests/client/tasks/test_task_result_caching.py b/tests/client/tasks/test_task_result_caching.py new file mode 100644 index 0000000000..8e75f0cae4 --- /dev/null +++ 
b/tests/client/tasks/test_task_result_caching.py @@ -0,0 +1,316 @@ +""" +Tests for Task result caching behavior. + +Verifies that Task.result() and await task cache results properly to avoid +redundant server calls and ensure consistent object identity. +""" + +from fastmcp import FastMCP +from fastmcp.client import Client + + +async def test_tool_task_result_cached_on_first_call(): + """First call caches result, subsequent calls return cached value.""" + call_count = 0 + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def counting_tool() -> int: + nonlocal call_count + call_count += 1 + return call_count + + async with Client(mcp) as client: + task = await client.call_tool("counting_tool", task=True) + + result1 = await task.result() + result2 = await task.result() + result3 = await task.result() + + # All should return 1 (first execution value) + assert result1.data == 1 + assert result2.data == 1 + assert result3.data == 1 + + # Verify they're the same object (cached) + assert result1 is result2 is result3 + + +async def test_prompt_task_result_cached(): + """PromptTask caches results on first call.""" + call_count = 0 + mcp = FastMCP("test") + + @mcp.prompt(task=True) + async def counting_prompt() -> str: + nonlocal call_count + call_count += 1 + return f"Call number: {call_count}" + + async with Client(mcp) as client: + task = await client.get_prompt("counting_prompt", task=True) + + result1 = await task.result() + result2 = await task.result() + result3 = await task.result() + + # All should return same content + assert result1.messages[0].content.text == "Call number: 1" + assert result2.messages[0].content.text == "Call number: 1" + assert result3.messages[0].content.text == "Call number: 1" + + # Verify they're the same object (cached) + assert result1 is result2 is result3 + + +async def test_resource_task_result_cached(): + """ResourceTask caches results on first call.""" + call_count = 0 + mcp = FastMCP("test") + + 
@mcp.resource("file://counter.txt", task=True) + async def counting_resource() -> str: + nonlocal call_count + call_count += 1 + return f"Count: {call_count}" + + async with Client(mcp) as client: + task = await client.read_resource("file://counter.txt", task=True) + + result1 = await task.result() + result2 = await task.result() + result3 = await task.result() + + # All should return same content + assert result1[0].text == "Count: 1" + assert result2[0].text == "Count: 1" + assert result3[0].text == "Count: 1" + + # Verify they're the same object (cached) + assert result1 is result2 is result3 + + +async def test_multiple_await_returns_same_object(): + """Multiple await task calls return identical object.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def sample_tool() -> str: + return "result" + + async with Client(mcp) as client: + task = await client.call_tool("sample_tool", task=True) + + result1 = await task + result2 = await task + result3 = await task + + # Should be exact same object in memory + assert result1 is result2 is result3 + assert id(result1) == id(result2) == id(result3) + + +async def test_result_and_await_share_cache(): + """task.result() and await task share the same cache.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def sample_tool() -> str: + return "cached" + + async with Client(mcp) as client: + task = await client.call_tool("sample_tool", task=True) + + # Call result() first + result_via_method = await task.result() + + # Then await directly + result_via_await = await task + + # Should be the same cached object + assert result_via_method is result_via_await + assert id(result_via_method) == id(result_via_await) + + +async def test_immediate_task_caches_result(): + """Immediate tasks (graceful degradation) also cache results.""" + call_count = 0 + mcp = FastMCP("test") + + # Tool with task=False - will execute immediately + @mcp.tool(task=False) + async def non_task_tool() -> int: + nonlocal call_count + 
call_count += 1 + return call_count + + async with Client(mcp) as client: + # Request as task, but server will execute immediately + task = await client.call_tool("non_task_tool", task=True) + + # Should be immediate (graceful degradation) + assert task.returned_immediately + + result1 = await task.result() + result2 = await task.result() + result3 = await task.result() + + # All should return cached value + assert result1.data == 1 + assert result2.data == 1 + assert result3.data == 1 + + # Verify they're the same object (cached) + assert result1 is result2 is result3 + + +async def test_immediate_prompt_task_caches_result(): + """Immediate prompt tasks cache results.""" + call_count = 0 + mcp = FastMCP("test") + + @mcp.prompt(task=False) + async def non_task_prompt() -> str: + nonlocal call_count + call_count += 1 + return f"Immediate: {call_count}" + + async with Client(mcp) as client: + task = await client.get_prompt("non_task_prompt", task=True) + + # Should be immediate + assert task.returned_immediately + + result1 = await task.result() + result2 = await task.result() + + # Verify caching + assert result1 is result2 + assert result1.messages[0].content.text == "Immediate: 1" + + +async def test_immediate_resource_task_caches_result(): + """Immediate resource tasks cache results.""" + call_count = 0 + mcp = FastMCP("test") + + @mcp.resource("file://immediate.txt", task=False) + async def non_task_resource() -> str: + nonlocal call_count + call_count += 1 + return f"Immediate: {call_count}" + + async with Client(mcp) as client: + task = await client.read_resource("file://immediate.txt", task=True) + + # Should be immediate + assert task.returned_immediately + + result1 = await task.result() + result2 = await task.result() + + # Verify caching + assert result1 is result2 + assert result1[0].text == "Immediate: 1" + + +async def test_cache_persists_across_mixed_access_patterns(): + """Cache works correctly when mixing result() and await.""" + mcp = 
FastMCP("test") + + @mcp.tool(task=True) + async def mixed_tool() -> str: + return "mixed" + + async with Client(mcp) as client: + task = await client.call_tool("mixed_tool", task=True) + + # Access in various orders + result1 = await task + result2 = await task.result() + result3 = await task + result4 = await task.result() + + # All should be the same cached object + assert result1 is result2 is result3 is result4 + + +async def test_different_tasks_have_separate_caches(): + """Different task instances maintain separate caches.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def separate_tool(value: str) -> str: + return f"Result: {value}" + + async with Client(mcp) as client: + task1 = await client.call_tool("separate_tool", {"value": "A"}, task=True) + task2 = await client.call_tool("separate_tool", {"value": "B"}, task=True) + + result1 = await task1.result() + result2 = await task2.result() + + # Different results + assert result1.data == "Result: A" + assert result2.data == "Result: B" + + # Not the same object + assert result1 is not result2 + + # But each task's cache works independently + result1_again = await task1.result() + result2_again = await task2.result() + + assert result1 is result1_again + assert result2 is result2_again + + +async def test_cache_survives_status_checks(): + """Calling status() doesn't affect result caching.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def status_check_tool() -> str: + return "status" + + async with Client(mcp) as client: + task = await client.call_tool("status_check_tool", task=True) + + # Check status multiple times + await task.status() + await task.status() + + result1 = await task.result() + + # Check status again + await task.status() + + result2 = await task.result() + + # Cache should still work + assert result1 is result2 + + +async def test_cache_survives_wait_calls(): + """Calling wait() doesn't affect result caching.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + 
async def wait_test_tool() -> str: + return "waited" + + async with Client(mcp) as client: + task = await client.call_tool("wait_test_tool", task=True) + + # Wait for completion + await task.wait() + + result1 = await task.result() + + # Wait again (no-op since completed) + await task.wait() + + result2 = await task.result() + + # Cache should still work + assert result1 is result2 diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 9dc4bfc905..7f1a29f6c3 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -463,6 +463,7 @@ async def test_server_info_custom_version(): async with client: result = client.initialize_result + assert result is not None assert result.serverInfo.name == "CustomVersionServer" assert result.serverInfo.version == "1.2.3" @@ -472,6 +473,7 @@ async def test_server_info_custom_version(): async with client: result = client.initialize_result + assert result is not None assert result.serverInfo.name == "DefaultVersionServer" # Should fall back to FastMCP version assert result.serverInfo.version == fastmcp.__version__ @@ -1130,7 +1132,9 @@ def greet(name: str) -> str: client = Client(server) async with client: - assert client.initialize_result.instructions == "Use the greet tool!" + result = client.initialize_result + assert result is not None + assert result.instructions == "Use the greet tool!" 
async def test_initialize_timeout_custom(self, fastmcp_server): """Test custom timeout for initialize().""" @@ -1148,6 +1152,7 @@ async def test_initialize_property_after_auto_init(self, fastmcp_server): async with client: # Access via property result = client.initialize_result + assert result is not None assert result.serverInfo.name == "TestServer" # Call method - should return cached diff --git a/tests/server/middleware/test_caching.py b/tests/server/middleware/test_caching.py index ed90d6345c..807ff70c36 100644 --- a/tests/server/middleware/test_caching.py +++ b/tests/server/middleware/test_caching.py @@ -1,5 +1,6 @@ """Tests for response caching middleware.""" +import sys import tempfile from unittest.mock import AsyncMock, MagicMock @@ -277,6 +278,10 @@ def test_tool_call_filtering( assert middleware1._matches_tool_cache_settings(tool_name=tool_name) is result +@pytest.mark.skipif( + sys.platform == "win32", + reason="SQLite caching tests are flaky on Windows due to temp directory issues.", +) class TestResponseCachingMiddlewareIntegration: """Integration tests with real FastMCP server.""" diff --git a/tests/server/middleware/test_logging.py b/tests/server/middleware/test_logging.py index 045b998bb9..34884d256f 100644 --- a/tests/server/middleware/test_logging.py +++ b/tests/server/middleware/test_logging.py @@ -332,7 +332,7 @@ async def test_on_message_with_resource_template_in_payload( assert get_log_lines(caplog) == snapshot( [ - '{"event": "request_start", "method": "test_method", "source": "client", "payload": "{\\"name\\":\\"tmpl\\",\\"title\\":null,\\"description\\":null,\\"icons\\":null,\\"tags\\":[],\\"meta\\":null,\\"enabled\\":true,\\"uri_template\\":\\"tmpl://{id}\\",\\"mime_type\\":\\"text/plain\\",\\"parameters\\":{\\"id\\":{\\"type\\":\\"string\\"}},\\"annotations\\":null}", "payload_type": "ResourceTemplate"}', + '{"event": "request_start", "method": "test_method", "source": "client", "payload": 
"{\\"name\\":\\"tmpl\\",\\"title\\":null,\\"description\\":null,\\"icons\\":null,\\"tags\\":[],\\"meta\\":null,\\"enabled\\":true,\\"uri_template\\":\\"tmpl://{id}\\",\\"mime_type\\":\\"text/plain\\",\\"parameters\\":{\\"id\\":{\\"type\\":\\"string\\"}},\\"annotations\\":null,\\"task\\":false}", "payload_type": "ResourceTemplate"}', '{"event": "request_success", "method": "test_method", "source": "client", "duration_ms": 0.02}', ] ) diff --git a/tests/server/tasks/__init__.py b/tests/server/tasks/__init__.py new file mode 100644 index 0000000000..84d43c0082 --- /dev/null +++ b/tests/server/tasks/__init__.py @@ -0,0 +1 @@ +"""Tests for MCP SEP-1686 background tasks.""" diff --git a/tests/server/tasks/conftest.py b/tests/server/tasks/conftest.py new file mode 100644 index 0000000000..59bf024f61 --- /dev/null +++ b/tests/server/tasks/conftest.py @@ -0,0 +1,21 @@ +"""Shared fixtures for task tests.""" + +import pytest + +from fastmcp.utilities.tests import temporary_settings + + +@pytest.fixture(autouse=True) +async def enable_docket_and_tasks(): + """Enable Docket and task protocol support for all task tests.""" + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + # Verify both are enabled + import fastmcp + + assert fastmcp.settings.enable_docket, "Docket should be enabled after fixture" + assert fastmcp.settings.enable_tasks, "Tasks should be enabled after fixture" + + yield diff --git a/tests/server/tasks/test_progress_dependency.py b/tests/server/tasks/test_progress_dependency.py new file mode 100644 index 0000000000..9c1b8ddcbb --- /dev/null +++ b/tests/server/tasks/test_progress_dependency.py @@ -0,0 +1,176 @@ +"""Tests for FastMCP Progress dependency.""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.dependencies import Progress +from fastmcp.utilities.tests import temporary_settings + + +async def test_progress_in_immediate_execution(): + """Test Progress dependency when calling tool 
immediately with Docket enabled.""" + mcp = FastMCP("test") + + @mcp.tool() + async def test_tool(progress: Progress = Progress()) -> str: + await progress.set_total(10) + await progress.increment() + await progress.set_message("Testing") + return "done" + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + from mcp.types import TextContent + + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "done" + + +async def test_progress_in_background_task(): + """Test Progress dependency in background task execution.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def test_task(progress: Progress = Progress()) -> str: + await progress.set_total(5) + await progress.increment() + await progress.set_message("Step 1") + return "done" + + async with Client(mcp) as client: + task = await client.call_tool("test_task", {}, task=True) + result = await task.result() + from mcp.types import TextContent + + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "done" + + +async def test_progress_tracks_multiple_increments(): + """Test that Progress correctly tracks multiple increment calls.""" + mcp = FastMCP("test") + + @mcp.tool() + async def count_to_ten(progress: Progress = Progress()) -> str: + await progress.set_total(10) + for i in range(10): + await progress.increment() + return "counted" + + async with Client(mcp) as client: + result = await client.call_tool("count_to_ten", {}) + from mcp.types import TextContent + + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "counted" + + +async def test_progress_status_message_in_background_task(): + """Regression test: TaskStatusResponse must include statusMessage field.""" + import asyncio + + mcp = FastMCP("test") + step_started = asyncio.Event() + + @mcp.tool(task=True) + async def task_with_progress(progress: Progress = Progress()) -> str: + await progress.set_total(3) + await 
progress.set_message("Step 1 of 3") + await progress.increment() + step_started.set() + + # Give test time to poll status + await asyncio.sleep(0.2) + + await progress.set_message("Step 2 of 3") + await progress.increment() + await progress.set_message("Step 3 of 3") + await progress.increment() + return "done" + + async with Client(mcp) as client: + task = await client.call_tool("task_with_progress", {}, task=True) + + # Wait for first step to start + await step_started.wait() + + # Get status and verify progress message + status = await task.status() + + # Verify statusMessage field is accessible and contains progress info + # Should not raise AttributeError + msg = status.statusMessage + assert msg is None or msg.startswith("Step") + + # Wait for completion + result = await task.result() + from mcp.types import TextContent + + assert isinstance(result.content[0], TextContent) + assert result.content[0].text == "done" + + +async def test_progress_fails_without_docket(): + """Test Progress dependency fails when Docket is not enabled.""" + with temporary_settings(enable_docket=False, enable_tasks=False): + mcp = FastMCP("test") + + @mcp.tool() + async def test_tool(progress: Progress = Progress()) -> str: + return "done" + + async with Client(mcp) as client: + with pytest.raises(Exception) as exc_info: + await client.call_tool("test_tool", {}) + + error_str = str(exc_info.value) + assert "Failed to resolve dependency" in error_str + assert "progress" in error_str + + +async def test_inmemory_progress_state(): + """Test that in-memory progress stores and returns state correctly.""" + mcp = FastMCP("test") + + @mcp.tool() + async def test_tool(progress: Progress = Progress()) -> dict: + # Initial state + assert progress.current is None + assert progress.total == 1 + assert progress.message is None + + # Set total + await progress.set_total(10) + assert progress.total == 10 + + # Increment + await progress.increment() + assert progress.current == 1 + + # Increment 
again + await progress.increment(2) + assert progress.current == 3 + + # Set message + await progress.set_message("Testing") + assert progress.message == "Testing" + + return { + "current": progress.current, + "total": progress.total, + "message": progress.message, + } + + async with Client(mcp) as client: + result = await client.call_tool("test_tool", {}) + from mcp.types import TextContent + + assert isinstance(result.content[0], TextContent) + # The tool returns a dict showing the final state + import json + + state = json.loads(result.content[0].text) + assert state["current"] == 3 + assert state["total"] == 10 + assert state["message"] == "Testing" diff --git a/tests/server/tasks/test_server_tasks_parameter.py b/tests/server/tasks/test_server_tasks_parameter.py new file mode 100644 index 0000000000..cc6c38b1cc --- /dev/null +++ b/tests/server/tasks/test_server_tasks_parameter.py @@ -0,0 +1,317 @@ +""" +Tests for server `tasks` parameter default inheritance. + +Verifies that the server's `tasks` parameter correctly sets defaults for all +components (tools, prompts, resources), and that explicit component-level +settings properly override the server default. 
+""" + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.utilities.tests import temporary_settings + + +async def test_server_tasks_true_defaults_all_components(): + """Server with tasks=True makes all components default to supporting tasks.""" + mcp = FastMCP("test", tasks=True) + + @mcp.tool() + async def my_tool() -> str: + return "tool result" + + @mcp.prompt() + async def my_prompt() -> str: + return "prompt result" + + @mcp.resource("test://resource") + async def my_resource() -> str: + return "resource result" + + async with Client(mcp) as client: + # Verify all task-enabled components are registered with docket + docket = mcp.docket + assert docket is not None + assert "my_tool" in docket.tasks + assert "my_prompt" in docket.tasks + assert "my_resource" in docket.tasks + + # Tool should support background execution + tool_task = await client.call_tool("my_tool", task=True) + assert not tool_task.returned_immediately + + # Prompt should support background execution + prompt_task = await client.get_prompt("my_prompt", task=True) + assert not prompt_task.returned_immediately + + # Resource should support background execution + resource_task = await client.read_resource("test://resource", task=True) + assert not resource_task.returned_immediately + + +async def test_server_tasks_false_defaults_all_components(): + """Server with tasks=False makes all components default to NOT supporting tasks.""" + mcp = FastMCP("test", tasks=False) + + @mcp.tool() + async def my_tool() -> str: + return "tool result" + + @mcp.prompt() + async def my_prompt() -> str: + return "prompt result" + + @mcp.resource("test://resource") + async def my_resource() -> str: + return "resource result" + + async with Client(mcp) as client: + # Tool should execute immediately (graceful degradation) + tool_task = await client.call_tool("my_tool", task=True) + assert tool_task.returned_immediately + + # Prompt should execute immediately (graceful degradation) + 
prompt_task = await client.get_prompt("my_prompt", task=True) + assert prompt_task.returned_immediately + + # Resource should execute immediately (graceful degradation) + resource_task = await client.read_resource("test://resource", task=True) + assert resource_task.returned_immediately + + +async def test_server_tasks_none_uses_settings(): + """Server with tasks=None (or omitted) uses global settings.""" + # Test with enable_tasks=True in settings + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + mcp = FastMCP("test") # tasks=None, should use settings + + @mcp.tool() + async def my_tool() -> str: + return "tool result" + + async with Client(mcp) as client: + # Tool should support background execution (from settings) + tool_task = await client.call_tool("my_tool", task=True) + assert not tool_task.returned_immediately + + # Test with enable_tasks=False in settings + with temporary_settings( + enable_docket=True, + enable_tasks=False, + ): + mcp2 = FastMCP("test2") # tasks=None, should use settings + + @mcp2.tool() + async def my_tool2() -> str: + return "tool result" + + async with Client(mcp2) as client: + # Tool should execute immediately (from settings) + tool_task = await client.call_tool("my_tool2", task=True) + assert tool_task.returned_immediately + + +async def test_component_explicit_false_overrides_server_true(): + """Component with task=False overrides server default of tasks=True.""" + mcp = FastMCP("test", tasks=True) + + @mcp.tool(task=False) + async def no_task_tool() -> str: + return "immediate result" + + @mcp.tool() + async def default_tool() -> str: + return "background result" + + async with Client(mcp) as client: + # Verify docket registration matches task settings + docket = mcp.docket + assert docket is not None + assert "no_task_tool" not in docket.tasks # task=False means not registered + assert "default_tool" in docket.tasks # Inherits tasks=True + + # Explicit False should execute immediately despite server 
default + no_task = await client.call_tool("no_task_tool", task=True) + assert no_task.returned_immediately + + # Default should support background execution + default_task = await client.call_tool("default_tool", task=True) + assert not default_task.returned_immediately + + +async def test_component_explicit_true_overrides_server_false(): + """Component with task=True overrides server default of tasks=False.""" + mcp = FastMCP("test", tasks=False) + + @mcp.tool(task=True) + async def task_tool() -> str: + return "background result" + + @mcp.tool() + async def default_tool() -> str: + return "immediate result" + + async with Client(mcp) as client: + # Verify docket registration matches task settings + docket = mcp.docket + assert docket is not None + assert "task_tool" in docket.tasks # task=True means registered + assert "default_tool" not in docket.tasks # Inherits tasks=False + + # Explicit True should support background execution despite server default + task = await client.call_tool("task_tool", task=True) + assert not task.returned_immediately + + # Default should execute immediately + default = await client.call_tool("default_tool", task=True) + assert default.returned_immediately + + +async def test_mixed_explicit_and_inherited(): + """Mix of explicit True/False/None on different components.""" + mcp = FastMCP("test", tasks=True) # Server default is True + + @mcp.tool() + async def inherited_tool() -> str: + return "inherits True" + + @mcp.tool(task=True) + async def explicit_true_tool() -> str: + return "explicit True" + + @mcp.tool(task=False) + async def explicit_false_tool() -> str: + return "explicit False" + + @mcp.prompt() + async def inherited_prompt() -> str: + return "inherits True" + + @mcp.prompt(task=False) + async def explicit_false_prompt() -> str: + return "explicit False" + + @mcp.resource("test://inherited") + async def inherited_resource() -> str: + return "inherits True" + + @mcp.resource("test://explicit_false", task=False) + async def 
explicit_false_resource() -> str: + return "explicit False" + + async with Client(mcp) as client: + # Verify docket registration matches task settings + docket = mcp.docket + assert docket is not None + # task=True (explicit or inherited) means registered + assert "inherited_tool" in docket.tasks + assert "explicit_true_tool" in docket.tasks + assert "inherited_prompt" in docket.tasks + assert "inherited_resource" in docket.tasks + # task=False means NOT registered + assert "explicit_false_tool" not in docket.tasks + assert "explicit_false_prompt" not in docket.tasks + assert "explicit_false_resource" not in docket.tasks + + # Tools + inherited = await client.call_tool("inherited_tool", task=True) + assert not inherited.returned_immediately + + explicit_true = await client.call_tool("explicit_true_tool", task=True) + assert not explicit_true.returned_immediately + + explicit_false = await client.call_tool("explicit_false_tool", task=True) + assert explicit_false.returned_immediately + + # Prompts + inherited_prompt_task = await client.get_prompt("inherited_prompt", task=True) + assert not inherited_prompt_task.returned_immediately + + explicit_false_prompt_task = await client.get_prompt( + "explicit_false_prompt", task=True + ) + assert explicit_false_prompt_task.returned_immediately + + # Resources + inherited_resource_task = await client.read_resource( + "test://inherited", task=True + ) + assert not inherited_resource_task.returned_immediately + + explicit_false_resource_task = await client.read_resource( + "test://explicit_false", task=True + ) + assert explicit_false_resource_task.returned_immediately + + +async def test_server_tasks_parameter_sets_component_defaults(): + """Server tasks parameter sets component defaults but global settings gate protocol.""" + # Server tasks=True sets component defaults, but enable_tasks must be True + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + mcp = FastMCP("test", tasks=True) + + @mcp.tool() + 
async def tool_inherits_true() -> str: + return "tool result" + + async with Client(mcp) as client: + # Tool inherits tasks=True from server + tool_task = await client.call_tool("tool_inherits_true", task=True) + assert not tool_task.returned_immediately + + # Server tasks=False sets component defaults + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + mcp2 = FastMCP("test2", tasks=False) + + @mcp2.tool() + async def tool_inherits_false() -> str: + return "tool result" + + async with Client(mcp2) as client: + # Tool inherits tasks=False from server (graceful degradation) + tool_task = await client.call_tool("tool_inherits_false", task=True) + assert tool_task.returned_immediately + + +async def test_resource_template_inherits_server_tasks_default(): + """Resource templates inherit server tasks default.""" + mcp = FastMCP("test", tasks=True) + + @mcp.resource("test://{item_id}") + async def templated_resource(item_id: str) -> str: + return f"resource {item_id}" + + async with Client(mcp) as client: + # Template should support background execution + resource_task = await client.read_resource("test://123", task=True) + assert not resource_task.returned_immediately + + +async def test_multiple_components_same_name_different_tasks(): + """Different component types with same name can have different task settings.""" + mcp = FastMCP("test", tasks=False) + + @mcp.tool(task=True) + async def shared_name() -> str: + return "tool result" + + @mcp.prompt() + async def shared_name_prompt() -> str: + return "prompt result" + + async with Client(mcp) as client: + # Tool with explicit True should support background execution + tool_task = await client.call_tool("shared_name", task=True) + assert not tool_task.returned_immediately + + # Prompt inheriting False should execute immediately + prompt_task = await client.get_prompt("shared_name_prompt", task=True) + assert prompt_task.returned_immediately diff --git 
a/tests/server/tasks/test_sync_function_task_disabled.py b/tests/server/tasks/test_sync_function_task_disabled.py new file mode 100644 index 0000000000..318c0fc164 --- /dev/null +++ b/tests/server/tasks/test_sync_function_task_disabled.py @@ -0,0 +1,267 @@ +""" +Tests that synchronous functions cannot be used as background tasks. + +Docket requires async functions for background execution. FastMCP automatically +disables task support for sync functions, with warnings for explicit task=True. +""" + +from pytest import LogCaptureFixture + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.utilities.tests import caplog_for_fastmcp + + +async def test_sync_tool_with_explicit_task_true_warns_and_disables( + caplog: LogCaptureFixture, +): + """Sync tool with task=True logs warning and disables task support.""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test") + + @mcp.tool(task=True) + def sync_tool(x: int) -> int: + """A synchronous tool.""" + logging.getLogger("fastmcp.myserver").info("I came from the tool!") + return x * 2 + + # Should have logged a warning during decoration + assert "task=True but is synchronous" in caplog.text + assert "Disabling task support" in caplog.text + + # Tool should have task=False after being disabled + tool = await mcp.get_tool("sync_tool") + assert tool.task is False + + # Verify execution: even if client requests task=True, should execute immediately + async with Client(mcp) as client: + task = await client.call_tool("sync_tool", {"x": 5}, task=True) + assert task.returned_immediately + result = await task.result() + assert result.data == 10 + + # Should have seen the log from inside the function + assert "I came from the tool!" 
in caplog.text + + +async def test_sync_tool_with_inherited_task_true_quietly_disables( + caplog: LogCaptureFixture, +): + """Sync tool inheriting task=True from server disables quietly (no warning).""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test", tasks=True) + + @mcp.tool() # Inherits task=True from server + def sync_tool(x: int) -> int: + """A synchronous tool.""" + logging.getLogger("fastmcp.myserver").info("I came from the tool!") + return x * 2 + + # Should NOT have logged a warning (quietly disabled) + assert "task=True but is synchronous" not in caplog.text + + # Tool should have task=False after being disabled + tool = await mcp.get_tool("sync_tool") + assert tool.task is False + + # Verify execution: should execute immediately + async with Client(mcp) as client: + task = await client.call_tool("sync_tool", {"x": 3}, task=True) + assert task.returned_immediately + result = await task.result() + assert result.data == 6 + + # Should have seen the log from inside the function + assert "I came from the tool!" 
in caplog.text + + +async def test_sync_prompt_with_explicit_task_true_warns_and_disables( + caplog: LogCaptureFixture, +): + """Sync prompt with task=True logs warning and disables task support.""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test") + + @mcp.prompt(task=True) + def sync_prompt() -> str: + """A synchronous prompt.""" + logging.getLogger("fastmcp.myserver").info("I came from the prompt!") + return "Hello" + + # Should have logged a warning during decoration + assert "task=True but is synchronous" in caplog.text + assert "Disabling task support" in caplog.text + + # Prompt should have task=False + prompt = await mcp.get_prompt("sync_prompt") + assert prompt.task is False + + # Verify execution: should execute immediately + async with Client(mcp) as client: + task = await client.get_prompt("sync_prompt", task=True) + assert task.returned_immediately + result = await task.result() + assert "Hello" in str(result) + + # Should have seen the log from inside the function + assert "I came from the prompt!" 
in caplog.text + + +async def test_sync_prompt_with_inherited_task_true_quietly_disables( + caplog: LogCaptureFixture, +): + """Sync prompt inheriting task=True disables quietly.""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test", tasks=True) + + @mcp.prompt() # Inherits task=True from server + def sync_prompt() -> str: + """A synchronous prompt.""" + logging.getLogger("fastmcp.myserver").info("I came from the prompt!") + return "Hello" + + # Should NOT have logged a warning (quietly disabled) + assert "task=True but is synchronous" not in caplog.text + + # Prompt should have task=False + prompt = await mcp.get_prompt("sync_prompt") + assert prompt.task is False + + # Verify execution: should execute immediately + async with Client(mcp) as client: + task = await client.get_prompt("sync_prompt", task=True) + assert task.returned_immediately + result = await task.result() + assert "Hello" in str(result) + + # Should have seen the log from inside the function + assert "I came from the prompt!" 
in caplog.text + + +async def test_sync_resource_with_explicit_task_true_warns_and_disables( + caplog: LogCaptureFixture, +): + """Sync resource with task=True logs warning and disables task support.""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test") + + @mcp.resource("test://sync", task=True) + def sync_resource() -> str: + """A synchronous resource.""" + logging.getLogger("fastmcp.myserver").info("I came from the resource!") + return "data" + + # Should have logged a warning during decoration + assert "task=True but is synchronous" in caplog.text + assert "Disabling task support" in caplog.text + + # Resource should have task=False + resource = await mcp._resource_manager.get_resource("test://sync") + assert resource.task is False + + # Verify execution: should execute immediately + async with Client(mcp) as client: + task = await client.read_resource("test://sync", task=True) + assert task.returned_immediately + result = await task.result() + assert "data" in str(result) + + # Should have seen the log from inside the function + assert "I came from the resource!" 
in caplog.text + + +async def test_sync_resource_with_inherited_task_true_quietly_disables( + caplog: LogCaptureFixture, +): + """Sync resource inheriting task=True disables quietly.""" + import logging + + with caplog_for_fastmcp(caplog): + caplog.set_level(logging.INFO) + + mcp = FastMCP("test", tasks=True) + + @mcp.resource("test://sync") # Inherits task=True from server + def sync_resource() -> str: + """A synchronous resource.""" + logging.getLogger("fastmcp.myserver").info("I came from the resource!") + return "data" + + # Should NOT have logged a warning (quietly disabled) + assert "task=True but is synchronous" not in caplog.text + + # Resource should have task=False + resource = await mcp._resource_manager.get_resource("test://sync") + assert resource.task is False + + # Verify execution: should execute immediately + async with Client(mcp) as client: + task = await client.read_resource("test://sync", task=True) + assert task.returned_immediately + result = await task.result() + assert "data" in str(result) + + # Should have seen the log from inside the function + assert "I came from the resource!" 
in caplog.text + + +async def test_async_tool_with_task_true_remains_enabled(): + """Async tools with task=True keep task support enabled.""" + mcp = FastMCP("test") + + @mcp.tool(task=True) + async def async_tool(x: int) -> int: + """An async tool.""" + return x * 2 + + # Tool should have task=True + tool = await mcp.get_tool("async_tool") + assert tool.task is True + + +async def test_async_prompt_with_task_true_remains_enabled(): + """Async prompts with task=True keep task support enabled.""" + mcp = FastMCP("test") + + @mcp.prompt(task=True) + async def async_prompt() -> str: + """An async prompt.""" + return "Hello" + + # Prompt should have task=True + prompt = await mcp.get_prompt("async_prompt") + assert prompt.task is True + + +async def test_async_resource_with_task_true_remains_enabled(): + """Async resources with task=True keep task support enabled.""" + mcp = FastMCP("test") + + @mcp.resource("test://async", task=True) + async def async_resource() -> str: + """An async resource.""" + return "data" + + # Resource should have task=True + resource = await mcp._resource_manager.get_resource("test://async") + assert resource.task is True diff --git a/tests/server/tasks/test_task_capabilities.py b/tests/server/tasks/test_task_capabilities.py new file mode 100644 index 0000000000..4a50504606 --- /dev/null +++ b/tests/server/tasks/test_task_capabilities.py @@ -0,0 +1,134 @@ +""" +Tests for SEP-1686 task capabilities declaration. + +Verifies that the server correctly advertises task support based on settings. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.utilities.tests import temporary_settings + + +async def test_capabilities_include_tasks_when_enabled(): + """Server capabilities include tasks when enable_tasks=True.""" + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + mcp = FastMCP("capability-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + async with Client(mcp) as client: + # Get server initialization result which includes capabilities + init_result = client.initialize_result + + # Verify tasks capability is present + assert init_result.capabilities.experimental is not None + assert "tasks" in init_result.capabilities.experimental + tasks_cap = init_result.capabilities.experimental["tasks"] + assert tasks_cap == { + "tools": True, + "prompts": True, + "resources": True, + } + + +async def test_capabilities_exclude_tasks_when_disabled(): + """Server capabilities do NOT include tasks when enable_tasks=False.""" + with temporary_settings( + enable_docket=True, + enable_tasks=False, + ): + mcp = FastMCP("capability-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + async with Client(mcp) as client: + # Get server initialization result + init_result = client.initialize_result + + # Verify tasks capability is NOT present + if init_result.capabilities.experimental: + assert "tasks" not in init_result.capabilities.experimental + + +async def test_capabilities_exclude_tasks_when_docket_disabled(): + """Server capabilities do NOT include tasks when enable_docket=False.""" + with temporary_settings( + enable_docket=False, + enable_tasks=False, + ): + mcp = FastMCP("capability-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + async with Client(mcp) as client: + # Get server initialization result + init_result = client.initialize_result + + # Verify tasks capability is NOT present + if init_result.capabilities.experimental: + assert 
"tasks" not in init_result.capabilities.experimental + + +async def test_enable_tasks_requires_enable_docket(): + """Setting enable_tasks=True without enable_docket=True raises error at server startup.""" + with temporary_settings( + enable_docket=False, + enable_tasks=True, + ): + mcp = FastMCP("config-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + # Should fail when trying to start server (during lifespan) + with pytest.raises(RuntimeError, match="requires.*enable_docket.*enable_tasks"): + async with Client(mcp): + pass # Should never reach here + + +async def test_client_advertises_task_capability_when_enabled(): + """Client advertises experimental.tasks capability when enable_tasks=True.""" + with temporary_settings( + enable_docket=True, + enable_tasks=True, + ): + mcp = FastMCP("client-cap-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + async with Client(mcp) as client: + # Client should have connected successfully with task capabilities + assert client.initialize_result is not None + + +async def test_client_does_not_advertise_tasks_when_disabled(): + """Client does NOT use custom session when enable_tasks=False.""" + with temporary_settings( + enable_docket=True, + enable_tasks=False, + ): + mcp = FastMCP("no-tasks-client-test") + + @mcp.tool() + def test_tool() -> str: + return "test" + + async with Client(mcp) as client: + # Session should be standard ClientSession, not our custom one + + # The session should be a standard ClientSession + assert type(client.session).__name__ == "ClientSession" diff --git a/tests/server/tasks/test_task_dependencies.py b/tests/server/tasks/test_task_dependencies.py new file mode 100644 index 0000000000..8bc6d46220 --- /dev/null +++ b/tests/server/tasks/test_task_dependencies.py @@ -0,0 +1,272 @@ +"""Tests for dependency injection in background tasks. + +These tests verify that Docket's dependency system works correctly when +user functions are queued as background tasks. 
Dependencies like CurrentDocket(), +CurrentFastMCP(), and Depends() should be resolved in the worker context. +""" + +from contextlib import asynccontextmanager + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.dependencies import CurrentDocket, CurrentFastMCP, Depends + + +@pytest.fixture +async def dependency_server(): + """Create a FastMCP server with dependency-using background tasks.""" + mcp = FastMCP("dependency-test-server") + + # Track dependency injection + injected_values = [] + + @mcp.tool(task=True) + async def tool_with_docket_dependency(docket=CurrentDocket()) -> str: + """Background tool that uses CurrentDocket dependency.""" + injected_values.append(("docket", docket)) + return f"Docket: {docket is not None}" + + @mcp.tool(task=True) + async def tool_with_server_dependency(server=CurrentFastMCP()) -> str: + """Background tool that uses CurrentFastMCP dependency.""" + injected_values.append(("server", server)) + return f"Server: {server.name}" + + @mcp.tool(task=True) + async def tool_with_custom_dependency( + value: int, multiplier: int = Depends(lambda: 10) + ) -> int: + """Background tool with custom Depends().""" + injected_values.append(("multiplier", multiplier)) + return value * multiplier + + @mcp.tool(task=True) + async def tool_with_multiple_dependencies( + name: str, + docket=CurrentDocket(), + server=CurrentFastMCP(), + ) -> str: + """Background tool with multiple dependencies.""" + injected_values.append(("multi_docket", docket)) + injected_values.append(("multi_server", server)) + return f"{name} on {server.name}" + + @mcp.prompt(task=True) + async def prompt_with_server_dependency(topic: str, server=CurrentFastMCP()) -> str: + """Background prompt that uses CurrentFastMCP dependency.""" + injected_values.append(("prompt_server", server)) + return f"Prompt from {server.name} about {topic}" + + @mcp.resource("file://data.txt", task=True) + async def 
resource_with_docket_dependency(docket=CurrentDocket()) -> str: + """Background resource that uses CurrentDocket dependency.""" + injected_values.append(("resource_docket", docket)) + return f"Resource via Docket: {docket is not None}" + + # Expose for test assertions + mcp._injected_values = injected_values # type: ignore[attr-defined] + + return mcp + + +async def test_background_tool_receives_docket_dependency(dependency_server): + """Background tools can use CurrentDocket() and it resolves correctly.""" + async with Client(dependency_server) as client: + task = await client.call_tool("tool_with_docket_dependency", {}, task=True) + + # Verify it's background + assert not task.returned_immediately + + # Get result - will execute in Docket worker + result = await task + + # Verify dependency was injected + assert len(dependency_server._injected_values) == 1 + dep_type, dep_value = dependency_server._injected_values[0] + assert dep_type == "docket" + assert dep_value is not None + assert "Docket: True" in result.data + + +async def test_background_tool_receives_server_dependency(dependency_server): + """Background tools can use CurrentFastMCP() and get the actual FastMCP server.""" + dependency_server._injected_values.clear() + + async with Client(dependency_server) as client: + task = await client.call_tool("tool_with_server_dependency", {}, task=True) + + # Verify background execution + assert not task.returned_immediately + + result = await task + + # Check the server instance was injected + assert len(dependency_server._injected_values) == 1 + dep_type, dep_value = dependency_server._injected_values[0] + assert dep_type == "server" + assert dep_value is dependency_server # Same instance! 
+ assert f"Server: {dependency_server.name}" in result.data + + +async def test_background_tool_receives_custom_depends(dependency_server): + """Background tools can use Depends() with custom functions.""" + dependency_server._injected_values.clear() + + async with Client(dependency_server) as client: + task = await client.call_tool( + "tool_with_custom_dependency", {"value": 5}, task=True + ) + + assert not task.returned_immediately + + result = await task + + # Check dependency was resolved + assert len(dependency_server._injected_values) == 1 + dep_type, dep_value = dependency_server._injected_values[0] + assert dep_type == "multiplier" + assert dep_value == 10 + assert result.data == 50 # 5 * 10 + + +async def test_background_tool_with_multiple_dependencies(dependency_server): + """Background tools can have multiple dependencies injected simultaneously.""" + dependency_server._injected_values.clear() + + async with Client(dependency_server) as client: + task = await client.call_tool( + "tool_with_multiple_dependencies", {"name": "test"}, task=True + ) + + assert not task.returned_immediately + + await task + + # Both dependencies should be injected + assert len(dependency_server._injected_values) == 2 + + dep_types = {item[0] for item in dependency_server._injected_values} + assert "multi_docket" in dep_types + assert "multi_server" in dep_types + + # Verify values + server_dep = next( + v for t, v in dependency_server._injected_values if t == "multi_server" + ) + assert server_dep is dependency_server + + +async def test_background_prompt_receives_dependencies(dependency_server): + """Background prompts can use dependency injection.""" + dependency_server._injected_values.clear() + + async with Client(dependency_server) as client: + task = await client.get_prompt( + "prompt_with_server_dependency", {"topic": "AI"}, task=True + ) + + assert not task.returned_immediately + + await task + + # Check dependency was injected + assert 
len(dependency_server._injected_values) == 1 + dep_type, dep_value = dependency_server._injected_values[0] + assert dep_type == "prompt_server" + assert dep_value is dependency_server + + +async def test_background_resource_receives_dependencies(dependency_server): + """Background resources can use dependency injection.""" + dependency_server._injected_values.clear() + + async with Client(dependency_server) as client: + task = await client.read_resource("file://data.txt", task=True) + + assert not task.returned_immediately + + await task + + # Check dependency was injected + assert len(dependency_server._injected_values) == 1 + dep_type, dep_value = dependency_server._injected_values[0] + assert dep_type == "resource_docket" + assert dep_value is not None + + +async def test_foreground_tool_dependencies_unaffected(dependency_server): + """Synchronous tools (task=False) still get dependencies as before.""" + dependency_server._injected_values.clear() + + @dependency_server.tool() # task=False + async def sync_tool(server=CurrentFastMCP()) -> str: + dependency_server._injected_values.append(("sync_server", server)) + return f"Sync: {server.name}" + + async with Client(dependency_server) as client: + await client.call_tool("sync_tool", {}) + + # Should execute immediately + assert len(dependency_server._injected_values) == 1 + assert dependency_server._injected_values[0][1] is dependency_server + + +async def test_dependency_context_managers_cleaned_up_in_background(): + """Context manager dependencies are properly cleaned up after background task.""" + cleanup_called = [] + + mcp = FastMCP("cleanup-test") + + @asynccontextmanager + async def tracked_connection(): + try: + cleanup_called.append("enter") + yield "connection" + finally: + cleanup_called.append("exit") + + @mcp.tool(task=True) + async def use_connection(name: str, conn: str = Depends(tracked_connection)) -> str: + assert conn == "connection" + assert "enter" in cleanup_called + assert "exit" not in 
cleanup_called # Still open during execution + return f"Used: {conn}" + + async with Client(mcp) as client: + task = await client.call_tool("use_connection", {"name": "test"}, task=True) + result = await task + + # After task completes, cleanup should have been called + assert cleanup_called == ["enter", "exit"] + assert "Used: connection" in result.data + + +async def test_dependency_errors_propagate_to_task_failure(): + """If dependency resolution fails, the background task should fail.""" + mcp = FastMCP("error-test") + + async def failing_dependency(): + raise ValueError("Dependency failed!") + + @mcp.tool(task=True) + async def tool_with_failing_dep( + value: str, dep: str = Depends(failing_dependency) + ) -> str: + return f"Got: {dep}" + + from fastmcp.exceptions import ToolError + + async with Client(mcp) as client: + task = await client.call_tool( + "tool_with_failing_dep", {"value": "test"}, task=True + ) + + # Task should fail due to dependency error + with pytest.raises(ToolError, match="Failed to resolve dependencies"): + await task.result() + + # Verify it reached failed state + status = await task.status() + assert status.status == "failed" diff --git a/tests/server/tasks/test_task_metadata.py b/tests/server/tasks/test_task_metadata.py new file mode 100644 index 0000000000..c603ff6a67 --- /dev/null +++ b/tests/server/tasks/test_task_metadata.py @@ -0,0 +1,63 @@ +""" +Tests for SEP-1686 related-task metadata in protocol responses. + +Per the spec, all task-related responses MUST include +modelcontextprotocol.io/related-task in _meta. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def metadata_server(): + """Create a server for testing metadata.""" + mcp = FastMCP("metadata-test") + + @mcp.tool(task=True) + async def test_tool(value: int) -> int: + return value * 2 + + return mcp + + +async def test_tasks_get_includes_related_task_metadata(metadata_server: FastMCP): + """tasks/get response includes modelcontextprotocol.io/related-task in _meta.""" + async with Client(metadata_server) as client: + # Submit a task + task = await client.call_tool("test_tool", {"value": 5}, task=True) + task_id = task.task_id + + # Get status via client (which uses protocol properly) + status = await client.get_task_status(task_id) + + # GetTaskResult is returned from response with metadata + # Verify the protocol included related-task metadata by checking the response worked + assert status.taskId == task_id + assert status.status in ["working", "completed"] + + +async def test_tasks_result_includes_related_task_metadata(metadata_server: FastMCP): + """tasks/result response includes modelcontextprotocol.io/related-task in _meta.""" + async with Client(metadata_server) as client: + # Submit and complete a task + task = await client.call_tool("test_tool", {"value": 7}, task=True) + result = await task.result() + + # Result should have metadata (added by task.result() or protocol) + # Just verify the result is valid and contains the expected value + assert result.content + assert result.data == 14 # 7 * 2 + + +async def test_tasks_list_includes_related_task_metadata(metadata_server: FastMCP): + """tasks/list response includes modelcontextprotocol.io/related-task in _meta.""" + async with Client(metadata_server) as client: + # List tasks via client (which uses protocol properly) + result = await client.list_tasks() + + # Verify list_tasks works and returns proper structure + assert "tasks" in result + assert isinstance(result["tasks"], list) diff 
"""
Tests for task protocol methods.

Tests the tasks/get, tasks/result, and tasks/list JSON-RPC protocol methods.
"""

import asyncio

import pytest
from mcp.shared.exceptions import McpError

from fastmcp import FastMCP
from fastmcp.client import Client


@pytest.fixture
async def endpoint_server():
    """Create an in-memory server exposing background-task-enabled tools."""
    # Fixed docstring: the original claimed "HTTP transport" but tests use the
    # in-memory Client transport; tasks/* are JSON-RPC methods, not HTTP POSTs.
    mcp = FastMCP("endpoint-test-server")

    @mcp.tool(task=True)  # Enable background execution
    async def quick_tool(value: int) -> int:
        """Returns the value immediately."""
        return value * 2

    @mcp.tool(task=True)  # Enable background execution
    async def error_tool() -> str:
        """Always raises an error."""
        raise RuntimeError("Task failed!")

    @mcp.tool(task=True)  # Enable background execution
    async def slow_tool() -> str:
        """A slow tool for testing cancellation."""
        await asyncio.sleep(10)
        return "done"

    return mcp


async def test_tasks_get_endpoint_returns_status(endpoint_server):
    """tasks/get returns task status."""
    async with Client(endpoint_server) as client:
        # Submit a task
        task = await client.call_tool("quick_tool", {"value": 21}, task=True)

        # Check status immediately - should be submitted or working
        status = await task.status()
        assert status.taskId == task.task_id
        assert status.status in ["working", "completed"]

        # Wait for completion
        await task.wait(timeout=2.0)

        # Check again - should be completed
        status = await task.status()
        assert status.status == "completed"


async def test_tasks_get_endpoint_includes_poll_interval(endpoint_server):
    """Task status includes the pollInterval polling hint."""
    # Fixed docstring: the field asserted below is pollInterval, not
    # "pollFrequency" as the original docstring claimed.
    async with Client(endpoint_server) as client:
        task = await client.call_tool("quick_tool", {"value": 42}, task=True)

        status = await task.status()
        assert status.pollInterval is not None
        assert isinstance(status.pollInterval, int)


async def test_tasks_result_endpoint_returns_result_when_completed(endpoint_server):
    """tasks/result returns the tool result when completed."""
    async with Client(endpoint_server) as client:
        task = await client.call_tool("quick_tool", {"value": 21}, task=True)

        # Wait for completion and get result
        result = await task.result()
        assert result.data == 42  # 21 * 2


async def test_tasks_result_endpoint_errors_if_not_completed(endpoint_server):
    """tasks/result returns error if task not completed yet."""
    # Create a task that won't complete until signaled
    completion_signal = asyncio.Event()

    @endpoint_server.tool(task=True)  # Enable background execution
    async def blocked_tool() -> str:
        await completion_signal.wait()
        return "done"

    async with Client(endpoint_server) as client:
        task = await client.call_tool("blocked_tool", task=True)

        # Try to get result immediately (task still running)
        with pytest.raises(Exception):  # Should raise or return error
            await client.get_task_result(task.task_id)

        # Cleanup - signal completion
        completion_signal.set()


async def test_tasks_result_endpoint_errors_if_task_not_found(endpoint_server):
    """tasks/result returns error for non-existent task."""
    async with Client(endpoint_server) as client:
        # Try to get result for non-existent task
        with pytest.raises(Exception):
            await client.get_task_result("non-existent-task-id")


async def test_tasks_result_endpoint_returns_error_for_failed_task(endpoint_server):
    """tasks/result returns error information for failed tasks."""
    async with Client(endpoint_server) as client:
        task = await client.call_tool("error_tool", task=True)

        # Wait for task to fail
        await task.wait(state="failed", timeout=2.0)

        # Getting result should raise or return error info
        with pytest.raises(Exception) as exc_info:
            await task.result()

        assert (
            "failed" in str(exc_info.value).lower()
            or "error" in str(exc_info.value).lower()
        )


async def test_tasks_list_endpoint_session_isolation(endpoint_server):
    """list_tasks returns only tasks submitted by this client."""
    # Since client tracks tasks locally, this tests client-side tracking
    async with Client(endpoint_server) as client:
        # Submit multiple tasks (server generates IDs)
        tasks = []
        for i in range(3):
            task = await client.call_tool("quick_tool", {"value": i}, task=True)
            tasks.append(task)

        # Wait for all to complete
        for task in tasks:
            await task.wait(timeout=2.0)

        # List tasks - should see all 3
        response = await client.list_tasks()
        returned_ids = [t["taskId"] for t in response["tasks"]]
        task_ids = [t.task_id for t in tasks]
        assert len(returned_ids) == 3
        assert all(tid in task_ids for tid in returned_ids)


async def test_get_status_nonexistent_task_raises_error(endpoint_server):
    """Getting status for nonexistent task raises MCP error (per SEP-1686 SDK behavior)."""
    async with Client(endpoint_server) as client:
        # Try to get status for task that was never created
        # Per SDK implementation: raises ValueError which becomes JSON-RPC error
        with pytest.raises(McpError, match="Task nonexistent-task-id not found"):
            await client.get_task_status("nonexistent-task-id")


async def test_task_cancellation_workflow(endpoint_server):
    """Task can be cancelled, transitioning to cancelled state."""
    async with Client(endpoint_server) as client:
        # Submit slow task
        task = await client.call_tool("slow_tool", {}, task=True)

        # Give it a moment to start
        await asyncio.sleep(0.1)

        # Cancel the task
        await task.cancel()

        # Wait (bounded) for cancellation to take effect instead of a fixed
        # sleep, which was race-prone on slow machines. Matches the
        # task.wait(state=..., timeout=...) idiom used elsewhere in this file.
        status = await task.wait(state="cancelled", timeout=2.0)
        assert status.status == "cancelled"
"""
Tests for SEP-1686 background task support for prompts.

Tests that prompts with task=True can execute in background.
"""

import pytest

from fastmcp import FastMCP
from fastmcp.client import Client


@pytest.fixture
async def prompt_server():
    """Create a FastMCP server with task-enabled prompts."""
    mcp = FastMCP("prompt-test-server")

    @mcp.prompt()
    async def simple_prompt(topic: str) -> str:
        """A simple prompt template."""
        return f"Write about: {topic}"

    @mcp.prompt(task=True)
    async def background_prompt(topic: str, depth: str = "detailed") -> str:
        """A prompt that can execute in background."""
        return f"Write a {depth} analysis of: {topic}"

    return mcp


async def test_synchronous_prompt_unchanged(prompt_server):
    """Prompts without task metadata execute synchronously as before."""
    async with Client(prompt_server) as client:
        # Regular call without task metadata
        result = await client.get_prompt("simple_prompt", {"topic": "AI"})

        # Should execute immediately and return result
        assert "Write about: AI" in str(result)


async def test_prompt_with_task_metadata_returns_immediately(prompt_server):
    """Prompts with task metadata return immediately with PromptTask object."""
    async with Client(prompt_server) as client:
        # Call with task metadata
        task = await client.get_prompt("background_prompt", {"topic": "AI"}, task=True)

        # Should return a PromptTask object immediately
        from fastmcp.client.client import PromptTask

        assert isinstance(task, PromptTask)
        assert isinstance(task.task_id, str)
        assert len(task.task_id) > 0


async def test_prompt_task_executes_in_background(prompt_server):
    """Prompt task executes via Docket in background."""
    async with Client(prompt_server) as client:
        task = await client.get_prompt(
            "background_prompt",
            {"topic": "Machine Learning", "depth": "comprehensive"},
            task=True,
        )

        # Verify background execution
        assert not task.returned_immediately

        # Get the result
        result = await task.result()
        assert "comprehensive" in result.messages[0].content.text.lower()


async def test_graceful_degradation_prompt_without_task_flag(prompt_server):
    """Prompts with task=False execute synchronously even when task is requested."""

    @prompt_server.prompt(task=False)  # Explicitly disable task support
    async def sync_only_prompt(topic: str) -> str:
        return f"Sync prompt: {topic}"

    async with Client(prompt_server) as client:
        # Try to call with task metadata - should execute synchronously
        task = await client.get_prompt("sync_only_prompt", {"topic": "test"}, task=True)

        # Should have executed immediately (graceful degradation)
        assert task.returned_immediately

        # Can get result without waiting
        result = await task.result()
        assert "Sync prompt: test" in result.messages[0].content.text
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def task_enabled_server(): + """Create a FastMCP server with task-enabled tools.""" + mcp = FastMCP("task-test-server") + + @mcp.tool(task=True) + async def simple_tool(message: str) -> str: + """A simple tool for testing.""" + return f"Processed: {message}" + + @mcp.tool(task=True) + async def failing_tool() -> str: + """A tool that always fails.""" + raise ValueError("This tool always fails") + + assert mcp._support_tasks_by_default + + return mcp + + +async def test_task_metadata_includes_task_id_and_ttl(task_enabled_server): + """Task metadata properly includes server-generated taskId and ttl.""" + async with Client(task_enabled_server) as client: + # Submit with specific ttl (server generates task ID) + task = await client.call_tool( + "simple_tool", + {"message": "test"}, + task=True, + ttl=30000, + ) + assert task + assert not task.returned_immediately + + # Server should have generated a task ID + assert task.task_id is not None + assert isinstance(task.task_id, str) + + +async def test_task_notification_sent_after_submission(task_enabled_server): + """Server sends notifications/tasks/created after task submission.""" + + @task_enabled_server.tool(task=True) + async def background_tool(message: str) -> str: + return f"Processed: {message}" + + async with Client(task_enabled_server) as client: + task = await client.call_tool("background_tool", {"message": "test"}, task=True) + assert task + assert not task.returned_immediately + + # Verify we can query the task + status = await task.status() + assert status.taskId == task.task_id + + +async def test_failed_task_stores_error(task_enabled_server): + """Failed tasks store the error in results.""" + + @task_enabled_server.tool(task=True) + async def failing_task_tool() -> str: + raise ValueError("This tool always fails") + + async with Client(task_enabled_server) as client: + task = await 
client.call_tool("failing_task_tool", task=True) + assert task + assert not task.returned_immediately + + # Wait for task to fail + status = await task.wait(state="failed", timeout=2.0) + assert status.status == "failed" diff --git a/tests/server/tasks/test_task_resources.py b/tests/server/tasks/test_task_resources.py new file mode 100644 index 0000000000..308e97a6f1 --- /dev/null +++ b/tests/server/tasks/test_task_resources.py @@ -0,0 +1,105 @@ +""" +Tests for SEP-1686 background task support for resources. + +Tests that resources with task=True can execute in background. +""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def resource_server(): + """Create a FastMCP server with task-enabled resources.""" + mcp = FastMCP("resource-test-server") + + @mcp.resource("file://data.txt") + async def simple_resource() -> str: + """A simple resource.""" + return "Simple content" + + @mcp.resource("file://large.txt", task=True) + async def background_resource() -> str: + """A resource that can execute in background.""" + return "Large file content that takes time to load" + + @mcp.resource("file://user/{user_id}/data.json", task=True) + async def template_resource(user_id: str) -> str: + """A resource template that can execute in background.""" + return f'{{"userId": "{user_id}", "data": "value"}}' + + return mcp + + +async def test_synchronous_resource_unchanged(resource_server): + """Resources without task metadata execute synchronously as before.""" + async with Client(resource_server) as client: + # Regular call without task metadata + result = await client.read_resource("file://data.txt") + + # Should execute immediately and return result + assert "Simple content" in str(result) + + +async def test_resource_with_task_metadata_returns_immediately(resource_server): + """Resources with task metadata return immediately with ResourceTask object.""" + async with Client(resource_server) as client: + # Call with 
task metadata + task = await client.read_resource("file://large.txt", task=True) + + # Should return a ResourceTask object immediately + from fastmcp.client.client import ResourceTask + + assert isinstance(task, ResourceTask) + assert isinstance(task.task_id, str) + assert len(task.task_id) > 0 + + +async def test_resource_task_executes_in_background(resource_server): + """Resource task executes via Docket in background.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://large.txt", task=True) + + # Verify background execution + assert not task.returned_immediately + + # Get the result + result = await task.result() + assert len(result) > 0 + assert result[0].text == "Large file content that takes time to load" + + +async def test_resource_template_with_task(resource_server): + """Resource templates with task=True execute in background.""" + async with Client(resource_server) as client: + task = await client.read_resource("file://user/123/data.json", task=True) + + # Verify background execution + assert not task.returned_immediately + + # Get the result + result = await task.result() + assert '"userId": "123"' in result[0].text + + +async def test_graceful_degradation_resource_without_task_flag(resource_server): + """Resources with task=False execute synchronously even with task metadata.""" + + @resource_server.resource( + "file://sync.txt", task=False + ) # Explicitly disable task support + async def sync_only_resource() -> str: + return "Sync content" + + async with Client(resource_server) as client: + # Try to call with task metadata - should execute synchronously + task = await client.read_resource("file://sync.txt", task=True) + + # Should have executed immediately (graceful degradation) + assert task.returned_immediately + + # Can get result without waiting + result = await task.result() + assert "Sync content" in result[0].text diff --git a/tests/server/tasks/test_task_return_types.py 
"""
Tests to verify all return types work identically with task=True.

These tests ensure that enabling background task support doesn't break
existing functionality - any tool/prompt/resource should work exactly
the same whether task=True or task=False.
"""

from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Any
from uuid import UUID

import pytest
from pydantic import BaseModel
from typing_extensions import TypedDict

from fastmcp import FastMCP
from fastmcp.client import Client
from fastmcp.utilities.types import Audio, File, Image


class UserData(BaseModel):
    """Example structured output."""

    name: str
    age: int
    active: bool


@pytest.fixture
async def return_type_server():
    """Server with tools that return various types."""
    mcp = FastMCP("return-type-test")

    # String return
    @mcp.tool(task=True)
    async def return_string() -> str:
        return "Hello, World!"

    # Integer return
    @mcp.tool(task=True)
    async def return_int() -> int:
        return 42

    # Float return
    @mcp.tool(task=True)
    async def return_float() -> float:
        return 3.14159

    # Boolean return
    @mcp.tool(task=True)
    async def return_bool() -> bool:
        return True

    # Dict return
    @mcp.tool(task=True)
    async def return_dict() -> dict[str, int]:
        return {"count": 100, "total": 500}

    # List return
    @mcp.tool(task=True)
    async def return_list() -> list[str]:
        return ["apple", "banana", "cherry"]

    # BaseModel return (structured output)
    @mcp.tool(task=True)
    async def return_model() -> UserData:
        return UserData(name="Alice", age=30, active=True)

    # None/null return
    @mcp.tool(task=True)
    async def return_none() -> None:
        return None

    return mcp


@pytest.mark.parametrize(
    "tool_name,expected_type,expected_value",
    [
        ("return_string", str, "Hello, World!"),
        ("return_int", int, 42),
        ("return_float", float, 3.14159),
        ("return_bool", bool, True),
        ("return_dict", dict, {"count": 100, "total": 500}),
        ("return_list", list, ["apple", "banana", "cherry"]),
        ("return_none", type(None), None),
    ],
)
async def test_task_basic_types(
    return_type_server: FastMCP,
    tool_name: str,
    expected_type: type,
    expected_value: Any,
):
    """Task mode returns basic types correctly."""
    async with Client(return_type_server) as client:
        task = await client.call_tool(tool_name, task=True)
        result = await task
        assert isinstance(result.data, expected_type)
        assert result.data == expected_value


async def test_task_model_return(return_type_server):
    """Task mode returns same BaseModel (as dict) as immediate mode."""
    async with Client(return_type_server) as client:
        task = await client.call_tool("return_model", task=True)
        result = await task

        # Client deserializes to dynamic class (type name lost with title pruning)
        assert result.data.__class__.__name__ == "Root"
        assert result.data.name == "Alice"
        assert result.data.age == 30
        assert result.data.active is True


async def test_task_vs_immediate_equivalence(return_type_server):
    """Verify task mode and immediate mode return identical results."""
    async with Client(return_type_server) as client:
        # Test a few types to verify equivalence
        tools_to_test = ["return_string", "return_int", "return_dict"]

        for tool_name in tools_to_test:
            # Call as task
            task = await client.call_tool(tool_name, task=True)
            task_result = await task

            # Call immediately (server should decline background execution when no task meta)
            immediate_result = await client.call_tool(tool_name)

            # Results should be identical
            assert task_result.data == immediate_result.data, (
                f"Mismatch for {tool_name}"
            )


@pytest.fixture
async def prompt_return_server():
    """Server with prompts that return various message structures."""
    mcp = FastMCP("prompt-return-test")

    @mcp.prompt(task=True)
    async def single_message_prompt() -> str:
        """Return a single string message."""
        return "Single message content"

    @mcp.prompt(task=True)
    async def multi_message_prompt() -> list[str]:
        """Return multiple messages."""
        return [
            "First message",
            "Second message",
            "Third message",
        ]

    return mcp


async def test_prompt_task_single_message(prompt_return_server):
    """Prompt task returns single message correctly."""
    async with Client(prompt_return_server) as client:
        task = await client.get_prompt("single_message_prompt", task=True)
        result = await task

        assert len(result.messages) == 1
        assert result.messages[0].content.text == "Single message content"


async def test_prompt_task_multiple_messages(prompt_return_server):
    """Prompt task returns multiple messages correctly."""
    async with Client(prompt_return_server) as client:
        task = await client.get_prompt("multi_message_prompt", task=True)
        result = await task

        assert len(result.messages) == 3
        assert result.messages[0].content.text == "First message"
        assert result.messages[1].content.text == "Second message"
        assert result.messages[2].content.text == "Third message"


@pytest.fixture
async def resource_return_server():
    """Server with resources that return various content types."""
    mcp = FastMCP("resource-return-test")

    @mcp.resource("text://simple", task=True)
    def simple_text() -> str:
        """Return simple text content."""
        return "Simple text resource"

    @mcp.resource("data://json", task=True)
    def json_data() -> dict[str, Any]:
        """Return JSON-like data."""
        return {"key": "value", "count": 123}

    return mcp


async def test_resource_task_text_content(resource_return_server):
    """Resource task returns text content correctly."""
    async with Client(resource_return_server) as client:
        task = await client.read_resource("text://simple", task=True)
        contents = await task

        assert len(contents) == 1
        assert contents[0].text == "Simple text resource"


async def test_resource_task_json_content(resource_return_server):
    """Resource task returns structured content correctly."""
    async with Client(resource_return_server) as client:
        task = await client.read_resource("data://json", task=True)
        contents = await task

        # Content should be JSON serialized
        assert len(contents) == 1
        import json

        data = json.loads(contents[0].text)
        assert data == {"key": "value", "count": 123}


# ==============================================================================
# Binary & Special Types
# ==============================================================================


@pytest.fixture
async def binary_type_server():
    """Server with tools returning binary and special types."""
    mcp = FastMCP("binary-test")

    @mcp.tool(task=True)
    async def return_bytes() -> bytes:
        return b"Hello bytes!"

    @mcp.tool(task=True)
    async def return_uuid() -> UUID:
        return UUID("12345678-1234-5678-1234-567812345678")

    @mcp.tool(task=True)
    async def return_path() -> Path:
        return Path("/tmp/test.txt")

    @mcp.tool(task=True)
    async def return_datetime() -> datetime:
        return datetime(2025, 11, 5, 12, 30, 45)

    return mcp


@pytest.mark.parametrize(
    "tool_name,expected_type,assertion_fn",
    [
        (
            "return_bytes",
            str,
            # Bytes may round-trip as raw text or base64 ("SGVsbG8gYnl0ZXMh")
            lambda r: "Hello bytes!" in r.data or "SGVsbG8gYnl0ZXMh" in r.data,
        ),
        (
            "return_uuid",
            str,
            lambda r: r.data == "12345678-1234-5678-1234-567812345678",
        ),
        (
            "return_path",
            str,
            lambda r: "tmp" in r.data and "test.txt" in r.data,
        ),
        (
            "return_datetime",
            datetime,
            lambda r: r.data == datetime(2025, 11, 5, 12, 30, 45),
        ),
    ],
)
async def test_task_binary_types(
    binary_type_server: FastMCP,
    tool_name: str,
    expected_type: type,
    assertion_fn: Any,
):
    """Task mode handles binary and special types."""
    async with Client(binary_type_server) as client:
        task = await client.call_tool(tool_name, task=True)
        result = await task
        assert isinstance(result.data, expected_type)
        assert assertion_fn(result)


# ==============================================================================
# Collection Varieties
# ==============================================================================


@pytest.fixture
async def collection_server():
    """Server with tools returning various collection types."""
    mcp = FastMCP("collection-test")

    @mcp.tool(task=True)
    async def return_tuple() -> tuple[int, str, bool]:
        return (42, "hello", True)

    @mcp.tool(task=True)
    async def return_set() -> set[int]:
        return {1, 2, 3}

    @mcp.tool(task=True)
    async def return_empty_list() -> list[str]:
        return []

    @mcp.tool(task=True)
    async def return_empty_dict() -> dict[str, Any]:
        return {}

    return mcp


@pytest.mark.parametrize(
    "tool_name,expected_type,expected_value",
    [
        # Tuples come back as lists after JSON round-trip
        ("return_tuple", list, [42, "hello", True]),
        ("return_set", set, {1, 2, 3}),
        ("return_empty_list", list, []),
    ],
)
async def test_task_collection_types(
    collection_server: FastMCP,
    tool_name: str,
    expected_type: type,
    expected_value: Any,
):
    """Task mode handles collection types."""
    async with Client(collection_server) as client:
        task = await client.call_tool(tool_name, task=True)
        result = await task
        assert isinstance(result.data, expected_type)
        assert result.data == expected_value


async def test_task_empty_dict_return(collection_server):
    """Task mode handles empty dict return."""
    async with Client(collection_server) as client:
        task = await client.call_tool("return_empty_dict", task=True)
        result = await task
        # Empty structured content becomes None in data
        assert result.data is None
        # But structured content is still {}
        assert result.structured_content == {}


# ==============================================================================
# Media Types (Image, Audio, File)
# ==============================================================================


@pytest.fixture
async def media_server(tmp_path):
    """Server with tools returning media types."""
    mcp = FastMCP("media-test")

    # Create test files
    test_image = tmp_path / "test.png"
    test_image.write_bytes(b"\x89PNG\r\n\x1a\n" + b"fake png data")

    test_audio = tmp_path / "test.mp3"
    test_audio.write_bytes(b"ID3" + b"fake mp3 data")

    test_file = tmp_path / "test.txt"
    test_file.write_text("test file content")

    @mcp.tool(task=True)
    async def return_image_path() -> Image:
        return Image(path=str(test_image))

    @mcp.tool(task=True)
    async def return_image_data() -> Image:
        return Image(data=test_image.read_bytes(), format="png")

    @mcp.tool(task=True)
    async def return_audio() -> Audio:
        return Audio(path=str(test_audio))

    @mcp.tool(task=True)
    async def return_file() -> File:
        return File(path=str(test_file))

    return mcp
+ +@pytest.mark.parametrize( + "tool_name,assertion_fn", + [ + ( + "return_image_path", + lambda r: len(r.content) == 1 and r.content[0].type == "image", + ), + ( + "return_image_data", + lambda r: len(r.content) == 1 + and r.content[0].type == "image" + and r.content[0].mimeType == "image/png", + ), + ( + "return_audio", + lambda r: len(r.content) == 1 and r.content[0].type in ["text", "audio"], + ), + ( + "return_file", + lambda r: len(r.content) == 1 and r.content[0].type == "resource", + ), + ], +) +async def test_task_media_types( + media_server: FastMCP, + tool_name: str, + assertion_fn: Any, +): + """Task mode handles media types (Image, Audio, File).""" + async with Client(media_server) as client: + task = await client.call_tool(tool_name, task=True) + result = await task + assert assertion_fn(result) + + +# ============================================================================== +# Structured Types (TypedDict, dataclass, unions) +# ============================================================================== + + +class PersonTypedDict(TypedDict): + """Example TypedDict.""" + + name: str + age: int + + +@dataclass +class PersonDataclass: + """Example dataclass.""" + + name: str + age: int + + +@pytest.fixture +async def structured_type_server(): + """Server with tools returning structured types.""" + mcp = FastMCP("structured-test") + + @mcp.tool(task=True) + async def return_typeddict() -> PersonTypedDict: + return {"name": "Bob", "age": 25} + + @mcp.tool(task=True) + async def return_dataclass() -> PersonDataclass: + return PersonDataclass(name="Charlie", age=35) + + @mcp.tool(task=True) + async def return_union() -> str | int: + return "string value" + + @mcp.tool(task=True) + async def return_union_int() -> str | int: + return 123 + + @mcp.tool(task=True) + async def return_optional() -> str | None: + return "has value" + + @mcp.tool(task=True) + async def return_optional_none() -> str | None: + return None + + return mcp + + 
+@pytest.mark.parametrize( + "tool_name,expected_name,expected_age", + [ + ("return_typeddict", "Bob", 25), + ("return_dataclass", "Charlie", 35), + ], +) +async def test_task_structured_dict_types( + structured_type_server: FastMCP, + tool_name: str, + expected_name: str, + expected_age: int, +): + """Task mode handles TypedDict and dataclass returns.""" + async with Client(structured_type_server) as client: + task = await client.call_tool(tool_name, task=True) + result = await task + # Both deserialize to dynamic Root class + assert result.data.name == expected_name + assert result.data.age == expected_age + + +@pytest.mark.parametrize( + "tool_name,expected_type,expected_value", + [ + ("return_union", str, "string value"), + ("return_union_int", int, 123), + ], +) +async def test_task_union_types( + structured_type_server: FastMCP, + tool_name: str, + expected_type: type, + expected_value: Any, +): + """Task mode handles union type branches.""" + async with Client(structured_type_server) as client: + task = await client.call_tool(tool_name, task=True) + result = await task + assert isinstance(result.data, expected_type) + assert result.data == expected_value + + +@pytest.mark.parametrize( + "tool_name,expected_type,expected_value", + [ + ("return_optional", str, "has value"), + ("return_optional_none", type(None), None), + ], +) +async def test_task_optional_types( + structured_type_server: FastMCP, + tool_name: str, + expected_type: type, + expected_value: Any, +): + """Task mode handles Optional types.""" + async with Client(structured_type_server) as client: + task = await client.call_tool(tool_name, task=True) + result = await task + assert isinstance(result.data, expected_type) + assert result.data == expected_value + + +# ============================================================================== +# MCP Content Blocks +# ============================================================================== + + +@pytest.fixture +async def 
mcp_content_server(tmp_path): + """Server with tools returning MCP content blocks.""" + import base64 + + from mcp.types import ( + AnyUrl, + EmbeddedResource, + ImageContent, + ResourceLink, + TextContent, + TextResourceContents, + ) + + mcp = FastMCP("content-test") + + test_image = tmp_path / "content.png" + test_image.write_bytes(b"\x89PNG\r\n\x1a\n" + b"content") + + @mcp.tool(task=True) + async def return_text_content() -> TextContent: + return TextContent(type="text", text="Direct text content") + + @mcp.tool(task=True) + async def return_image_content() -> ImageContent: + return ImageContent( + type="image", + data=base64.b64encode(test_image.read_bytes()).decode(), + mimeType="image/png", + ) + + @mcp.tool(task=True) + async def return_embedded_resource() -> EmbeddedResource: + return EmbeddedResource( + type="resource", + resource=TextResourceContents( + uri=AnyUrl("test://resource"), text="embedded" + ), + ) + + @mcp.tool(task=True) + async def return_resource_link() -> ResourceLink: + return ResourceLink( + type="resource_link", uri=AnyUrl("test://linked"), name="Test Resource" + ) + + @mcp.tool(task=True) + async def return_mixed_content() -> list[TextContent | ImageContent]: + return [ + TextContent(type="text", text="First block"), + ImageContent( + type="image", + data=base64.b64encode(test_image.read_bytes()).decode(), + mimeType="image/png", + ), + TextContent(type="text", text="Third block"), + ] + + return mcp + + +@pytest.mark.parametrize( + "tool_name,assertion_fn", + [ + ( + "return_text_content", + lambda r: len(r.content) == 1 + and r.content[0].type == "text" + and r.content[0].text == "Direct text content", + ), + ( + "return_image_content", + lambda r: len(r.content) == 1 + and r.content[0].type == "image" + and r.content[0].mimeType == "image/png", + ), + ( + "return_embedded_resource", + lambda r: len(r.content) == 1 and r.content[0].type == "resource", + ), + ( + "return_resource_link", + lambda r: len(r.content) == 1 + and 
r.content[0].type == "resource_link" + and str(r.content[0].uri) == "test://linked", + ), + ], +) +async def test_task_mcp_content_types( + mcp_content_server: FastMCP, + tool_name: str, + assertion_fn: Any, +): + """Task mode handles MCP content block types.""" + async with Client(mcp_content_server) as client: + task = await client.call_tool(tool_name, task=True) + result = await task + assert assertion_fn(result) + + +async def test_task_mixed_content_return(mcp_content_server): + """Task mode handles mixed content list return.""" + async with Client(mcp_content_server) as client: + task = await client.call_tool("return_mixed_content", task=True) + result = await task + assert len(result.content) == 3 + assert result.content[0].type == "text" + assert result.content[0].text == "First block" + assert result.content[1].type == "image" + assert result.content[2].type == "text" + assert result.content[2].text == "Third block" diff --git a/tests/server/tasks/test_task_security.py b/tests/server/tasks/test_task_security.py new file mode 100644 index 0000000000..88af3aa7d6 --- /dev/null +++ b/tests/server/tasks/test_task_security.py @@ -0,0 +1,47 @@ +""" +Tests for session-based task ID isolation (CRITICAL SECURITY). + +Ensures that tasks are properly scoped to sessions and clients cannot +access each other's tasks. 
+""" + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def task_server(): + """Create a server with background tasks enabled.""" + mcp = FastMCP("security-test-server") + + @mcp.tool(task=True) # Enable background execution + async def secret_tool(data: str) -> str: + """A tool that processes sensitive data.""" + return f"Secret result: {data}" + + return mcp + + +async def test_same_session_can_access_all_its_tasks(task_server): + """A single session can access all tasks it created.""" + async with Client(task_server) as client: + # Submit multiple tasks + task1 = await client.call_tool( + "secret_tool", {"data": "first"}, task=True, task_id="task-1" + ) + task2 = await client.call_tool( + "secret_tool", {"data": "second"}, task=True, task_id="task-2" + ) + + # Wait for both to complete + await task1.wait(timeout=2.0) + await task2.wait(timeout=2.0) + + # Should be able to access both + result1 = await task1.result() + result2 = await task2.result() + + assert "first" in str(result1.data) + assert "second" in str(result2.data) diff --git a/tests/server/tasks/test_task_status_notifications.py b/tests/server/tasks/test_task_status_notifications.py new file mode 100644 index 0000000000..98d333ca97 --- /dev/null +++ b/tests/server/tasks/test_task_status_notifications.py @@ -0,0 +1,160 @@ +""" +Tests for notifications/tasks/status subscription mechanism (SEP-1686 lines 436-444). + +Per the spec, servers MAY send notifications/tasks/status when task state changes. +This is an optional optimization that reduces client polling frequency. + +These tests verify that the subscription mechanism works correctly without breaking +existing functionality. Notification delivery is best-effort and clients MUST NOT +rely on receiving them. 
+""" + +import asyncio + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def notification_server(): + """Create a server for testing task status notifications.""" + mcp = FastMCP("notification-test") + + @mcp.tool(task=True) + async def quick_task(value: int) -> int: + """Quick task that completes immediately.""" + return value * 2 + + @mcp.tool(task=True) + async def slow_task(duration: float = 0.1) -> str: + """Slow task for testing working status.""" + await asyncio.sleep(duration) + return "completed" + + @mcp.tool(task=True) + async def failing_task() -> str: + """Task that always fails.""" + raise ValueError("Task failed intentionally") + + @mcp.prompt(task=True) + async def test_prompt(name: str) -> str: + """Test prompt for background execution.""" + await asyncio.sleep(0.05) + return f"Hello, {name}!" + + @mcp.resource("test://resource", task=True) + async def test_resource() -> str: + """Test resource for background execution.""" + await asyncio.sleep(0.05) + return "resource content" + + return mcp + + +async def test_subscription_spawned_for_tool_task(notification_server: FastMCP): + """Subscription task is spawned when tool task is created.""" + async with Client(notification_server) as client: + # Create task - should spawn subscription + task = await client.call_tool("quick_task", {"value": 5}, task=True) + + # Task should complete normally + result = await task + assert result.data == 10 + + # Subscription should clean up automatically + # (No way to directly test, but shouldn't cause issues) + + +async def test_subscription_handles_task_completion(notification_server: FastMCP): + """Subscription properly handles task completion and cleanup.""" + async with Client(notification_server) as client: + # Multiple tasks should each get their own subscription + task1 = await client.call_tool("quick_task", {"value": 1}, task=True) + task2 = await client.call_tool("quick_task", {"value": 2}, 
task=True) + task3 = await client.call_tool("quick_task", {"value": 3}, task=True) + + # All should complete successfully + result1 = await task1 + result2 = await task2 + result3 = await task3 + + assert result1.data == 2 + assert result2.data == 4 + assert result3.data == 6 + + # Subscriptions should all clean up + # Give them a moment + await asyncio.sleep(0.1) + + +async def test_subscription_handles_task_failure(notification_server: FastMCP): + """Subscription properly handles task failure.""" + async with Client(notification_server) as client: + task = await client.call_tool("failing_task", {}, task=True) + + # Task should fail + with pytest.raises(Exception): + await task + + # Subscription should handle failure and clean up + await asyncio.sleep(0.1) + + +async def test_subscription_for_prompt_tasks(notification_server: FastMCP): + """Subscriptions work for prompt tasks.""" + async with Client(notification_server) as client: + task = await client.get_prompt("test_prompt", {"name": "World"}, task=True) + + result = await task + # Prompt result has messages + assert result + + # Subscription should clean up + await asyncio.sleep(0.1) + + +async def test_subscription_for_resource_tasks(notification_server: FastMCP): + """Subscriptions work for resource tasks.""" + async with Client(notification_server) as client: + task = await client.read_resource("test://resource", task=True) + + result = await task + assert result # Resource contents + + # Subscription should clean up + await asyncio.sleep(0.1) + + +async def test_subscriptions_cleanup_on_session_disconnect( + notification_server: FastMCP, +): + """Subscriptions are cleaned up when session disconnects.""" + # Start session and create task + async with Client(notification_server) as client: + task = await client.call_tool("slow_task", {"duration": 1.0}, task=True) + task_id = task.task_id + # Disconnect before task completes (session __aexit__ cancels subscriptions) + + # Session is now closed, subscription 
should be cancelled + # Task continues in Docket but notification subscription is gone + # This test passing means no crash occurred during cleanup + assert task_id # Task was created + + +async def test_multiple_concurrent_subscriptions(notification_server: FastMCP): + """Multiple concurrent tasks each have their own subscription.""" + async with Client(notification_server) as client: + # Start many tasks concurrently + tasks = [] + for i in range(10): + task = await client.call_tool("quick_task", {"value": i}, task=True) + tasks.append(task) + + # All should complete + results = await asyncio.gather(*tasks) + assert len(results) == 10 + + # All subscriptions should clean up + await asyncio.sleep(0.1) diff --git a/tests/server/tasks/test_task_tools.py b/tests/server/tasks/test_task_tools.py new file mode 100644 index 0000000000..6d5056960c --- /dev/null +++ b/tests/server/tasks/test_task_tools.py @@ -0,0 +1,103 @@ +""" +Tests for server-side tool task behavior. + +Tests tool-specific task handling, parallel to test_task_prompts.py +and test_task_resources.py. 
+""" + +import asyncio + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def tool_server(): + """Create a FastMCP server with task-enabled tools.""" + mcp = FastMCP("tool-task-server") + + @mcp.tool(task=True) + async def simple_tool(message: str) -> str: + """A simple tool for testing.""" + return f"Processed: {message}" + + @mcp.tool(task=False) + async def sync_only_tool(message: str) -> str: + """Tool with task=False.""" + return f"Sync: {message}" + + return mcp + + +async def test_synchronous_tool_call_unchanged(tool_server): + """Tools without task metadata execute synchronously as before.""" + async with Client(tool_server) as client: + # Regular call without task metadata + result = await client.call_tool("simple_tool", {"message": "hello"}) + + # Should execute immediately and return result + assert "Processed: hello" in str(result) + + +async def test_tool_with_task_metadata_returns_immediately(tool_server): + """Tools with task metadata return immediately with ToolTask object.""" + async with Client(tool_server) as client: + # Call with task metadata + task = await client.call_tool("simple_tool", {"message": "test"}, task=True) + assert task + assert not task.returned_immediately + + from fastmcp.client.client import ToolTask + + assert isinstance(task, ToolTask) + assert isinstance(task.task_id, str) + assert len(task.task_id) > 0 + + +async def test_tool_task_executes_in_background(tool_server): + """Tool task is submitted to Docket and executes in background.""" + execution_started = asyncio.Event() + execution_completed = asyncio.Event() + + @tool_server.tool(task=True) + async def coordinated_tool() -> str: + """Tool with coordination points.""" + execution_started.set() + await execution_completed.wait() + return "completed" + + async with Client(tool_server) as client: + task = await client.call_tool("coordinated_tool", task=True) + assert task + assert not task.returned_immediately + + 
# Wait for execution to start + await asyncio.wait_for(execution_started.wait(), timeout=2.0) + + # Task should still be working + status = await task.status() + assert status.status in ["working"] + + # Signal completion + execution_completed.set() + await task.wait(timeout=2.0) + + result = await task.result() + assert result.data == "completed" + + +async def test_graceful_degradation_tool_without_task_flag(tool_server): + """Tools with task=False execute synchronously even with task metadata.""" + async with Client(tool_server) as client: + # Try to call with task metadata - server should execute synchronously + task = await client.call_tool("sync_only_tool", {"message": "test"}, task=True) + assert task + assert task.returned_immediately + + result = await task.result() + assert "Sync: test" in str(result) + + status = await task.status() + assert status.status == "completed" diff --git a/tests/server/tasks/test_task_ttl.py b/tests/server/tasks/test_task_ttl.py new file mode 100644 index 0000000000..769fafffa3 --- /dev/null +++ b/tests/server/tasks/test_task_ttl.py @@ -0,0 +1,87 @@ +""" +Tests for SEP-1686 ttl parameter handling. + +Per the spec, servers MUST return ttl in all tasks/get responses, +and results should be retained for ttl milliseconds after completion. 
+""" + +import asyncio + +import pytest + +from fastmcp import FastMCP +from fastmcp.client import Client + + +@pytest.fixture +async def keepalive_server(): + """Create a server for testing ttl behavior.""" + mcp = FastMCP("keepalive-test") + + @mcp.tool(task=True) + async def quick_task(value: int) -> int: + return value * 2 + + @mcp.tool(task=True) + async def slow_task() -> str: + await asyncio.sleep(1) + return "done" + + return mcp + + +async def test_keepalive_returned_in_submitted_state(keepalive_server: FastMCP): + """ttl is returned in tasks/get even when task is submitted/working.""" + async with Client(keepalive_server) as client: + # Submit task with explicit ttl + task = await client.call_tool( + "slow_task", + {}, + task=True, + ttl=30000, # 30 seconds (client-requested) + ) + + # Check status immediately - should be submitted or working + status = await task.status() + assert status.status in ["working"] + + # ttl should be present per spec (MUST return in all responses) + # TODO: Docket uses a global execution_ttl for all tasks, not per-task TTLs. + # The spec allows servers to override client-requested TTL (line 431). + # FastMCP returns the server's actual global TTL (60000ms default from Docket). + # If Docket gains per-task TTL support, update this to verify client-requested TTL is respected. + assert status.ttl == 60000 # Server's global TTL, not client-requested 30000 + + +async def test_keepalive_returned_in_completed_state(keepalive_server: FastMCP): + """ttl is returned in tasks/get after task completes.""" + async with Client(keepalive_server) as client: + # Submit and complete task + task = await client.call_tool( + "quick_task", + {"value": 5}, + task=True, + ttl=45000, # Client-requested TTL + ) + await task.wait(timeout=2.0) + + # Check status - should be completed + status = await task.status() + assert status.status == "completed" + + # TODO: Docket uses global execution_ttl, not per-task TTLs. 
+ # Server returns its global TTL (60000ms), not the client-requested 45000ms. + # This is spec-compliant - servers MAY override requested TTL (spec line 431). + assert status.ttl == 60000 # Server's global TTL, not client-requested 45000 + + +async def test_default_keepalive_when_not_specified(keepalive_server: FastMCP): + """Default ttl is used when client doesn't specify.""" + async with Client(keepalive_server) as client: + # Submit without explicit ttl + task = await client.call_tool("quick_task", {"value": 3}, task=True) + await task.wait(timeout=2.0) + + status = await task.status() + # Should have default ttl (60000ms = 60 seconds) + assert status.ttl == 60000 diff --git a/tests/server/test_dependencies.py b/tests/server/test_dependencies.py new file mode 100644 index 0000000000..208ae31066 --- /dev/null +++ b/tests/server/test_dependencies.py @@ -0,0 +1,735 @@ +"""Tests for Docket-style dependency injection in FastMCP.""" + +from contextlib import asynccontextmanager, contextmanager + +import pytest +from mcp.types import TextContent, TextResourceContents + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.dependencies import CurrentContext, Depends +from fastmcp.server.context import Context + +HUZZAH = "huzzah!" 
+ + +class Connection: + """Test connection that tracks whether it's currently open.""" + + def __init__(self): + self.is_open = False + + async def __aenter__(self): + self.is_open = True + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + self.is_open = False + + +@asynccontextmanager +async def get_connection(): + """Dependency that provides an open connection.""" + async with Connection() as conn: + yield conn + + +@pytest.fixture +def mcp(): + """Create a FastMCP server for testing.""" + return FastMCP("test-server") + + +async def test_depends_with_sync_function(mcp: FastMCP): + """Test that Depends works with sync dependency functions.""" + + def get_config() -> dict[str, str]: + return {"api_key": "secret123", "endpoint": "https://api.example.com"} + + @mcp.tool() + def fetch_data(query: str, config: dict[str, str] = Depends(get_config)) -> str: + return ( + f"Fetching '{query}' from {config['endpoint']} with key {config['api_key']}" + ) + + async with Client(mcp) as client: + result = await client.call_tool("fetch_data", {"query": "users"}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert "Fetching 'users' from https://api.example.com" in content.text + assert "secret123" in content.text + + +async def test_depends_with_async_function(mcp: FastMCP): + """Test that Depends works with async dependency functions.""" + + async def get_user_id() -> int: + return 42 + + @mcp.tool() + async def greet_user(name: str, user_id: int = Depends(get_user_id)) -> str: + return f"Hello {name}, your ID is {user_id}" + + async with Client(mcp) as client: + result = await client.call_tool("greet_user", {"name": "Alice"}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "Hello Alice, your ID is 42" + + +async def test_depends_with_async_context_manager(mcp: FastMCP): + """Test that Depends works with async 
context managers for resource management.""" + cleanup_called = False + + @asynccontextmanager + async def get_database(): + db = "db_connection" + try: + yield db + finally: + nonlocal cleanup_called + cleanup_called = True + + @mcp.tool() + async def query_db(sql: str, db: str = Depends(get_database)) -> str: + return f"Executing '{sql}' on {db}" + + async with Client(mcp) as client: + result = await client.call_tool("query_db", {"sql": "SELECT * FROM users"}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert "Executing 'SELECT * FROM users' on db_connection" in content.text + assert cleanup_called + + +async def test_nested_dependencies(mcp: FastMCP): + """Test that dependencies can depend on other dependencies.""" + + def get_base_url() -> str: + return "https://api.example.com" + + def get_api_client(base_url: str = Depends(get_base_url)) -> dict[str, str]: + return {"base_url": base_url, "version": "v1"} + + @mcp.tool() + async def call_api( + endpoint: str, client: dict[str, str] = Depends(get_api_client) + ) -> str: + return f"Calling {client['base_url']}/{client['version']}/{endpoint}" + + async with Client(mcp) as client: + result = await client.call_tool("call_api", {"endpoint": "users"}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "Calling https://api.example.com/v1/users" + + +async def test_dependencies_excluded_from_schema(mcp: FastMCP): + """Test that dependency parameters don't appear in the tool schema.""" + + def get_config() -> dict[str, str]: + return {"key": "value"} + + @mcp.tool() + async def my_tool( + name: str, age: int, config: dict[str, str] = Depends(get_config) + ) -> str: + return f"{name} is {age} years old" + + tools = await mcp._list_tools_mcp() + tool = next(t for t in tools if t.name == "my_tool") + + assert "name" in tool.inputSchema["properties"] + assert "age" in 
tool.inputSchema["properties"] + assert "config" not in tool.inputSchema["properties"] + assert len(tool.inputSchema["properties"]) == 2 + + +async def test_current_context_dependency(mcp: FastMCP): + """Test that CurrentContext dependency provides access to FastMCP Context.""" + + @mcp.tool() + def use_context(ctx: Context = CurrentContext()) -> str: + assert isinstance(ctx, Context) + return HUZZAH + + async with Client(mcp) as client: + result = await client.call_tool("use_context", {}) + assert HUZZAH in str(result) + + +async def test_current_context_and_legacy_context_coexist(mcp: FastMCP): + """Test that CurrentContext dependency and legacy Context injection work together.""" + + @mcp.tool() + def use_both_contexts( + legacy_ctx: Context, + dep_ctx: Context = CurrentContext(), + ) -> str: + assert isinstance(legacy_ctx, Context) + assert isinstance(dep_ctx, Context) + assert legacy_ctx is dep_ctx + return HUZZAH + + async with Client(mcp) as client: + result = await client.call_tool("use_both_contexts", {}) + assert HUZZAH in str(result) + + +async def test_backward_compat_context_still_works(mcp: FastMCP): + """Test that existing Context injection via type annotation still works.""" + + @mcp.tool() + async def get_request_id(ctx: Context) -> str: + return ctx.request_id + + async with Client(mcp) as client: + result = await client.call_tool("get_request_id", {}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert len(content.text) > 0 + + +async def test_sync_tool_with_async_dependency(mcp: FastMCP): + """Test that sync tools work with async dependencies.""" + + async def fetch_config() -> str: + return "loaded_config" + + @mcp.tool() + def process_data(value: int, config: str = Depends(fetch_config)) -> str: + return f"Processing {value} with {config}" + + async with Client(mcp) as client: + result = await client.call_tool("process_data", {"value": 100}) + assert len(result.content) == 1 + 
content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "Processing 100 with loaded_config" + + +async def test_dependency_caching(mcp: FastMCP): + """Test that dependencies are cached within a single tool call.""" + call_count = 0 + + def expensive_dependency() -> int: + nonlocal call_count + call_count += 1 + return 42 + + @mcp.tool() + async def tool_with_cached_dep( + dep1: int = Depends(expensive_dependency), + dep2: int = Depends(expensive_dependency), + ) -> str: + return f"{dep1} + {dep2} = {dep1 + dep2}" + + async with Client(mcp) as client: + result = await client.call_tool("tool_with_cached_dep", {}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "42 + 42 = 84" + assert call_count == 1 + + +async def test_context_and_depends_together(mcp: FastMCP): + """Test that Context type injection and Depends can be used together.""" + + def get_multiplier() -> int: + return 10 + + @mcp.tool() + async def mixed_deps( + value: int, ctx: Context, multiplier: int = Depends(get_multiplier) + ) -> str: + assert isinstance(ctx, Context) + assert ctx.request_id + assert len(ctx.request_id) > 0 + return ( + f"Request {ctx.request_id}: {value} * {multiplier} = {value * multiplier}" + ) + + async with Client(mcp) as client: + result = await client.call_tool("mixed_deps", {"value": 5}) + assert len(result.content) == 1 + content = result.content[0] + assert isinstance(content, TextContent) + assert "5 * 10 = 50" in content.text + assert "Request " in content.text + + +async def test_resource_with_dependency(mcp: FastMCP): + """Test that resources support dependency injection.""" + + def get_storage_path() -> str: + return "/data/config" + + @mcp.resource("config://settings") + async def get_settings(storage: str = Depends(get_storage_path)) -> str: + return f"Settings loaded from {storage}" + + async with Client(mcp) as client: + result = await 
client.read_resource("config://settings") + assert len(result) == 1 + content = result[0] + assert isinstance(content, TextResourceContents) + assert content.text == "Settings loaded from /data/config" + + +async def test_resource_with_context_and_dependency(mcp: FastMCP): + """Test that resources can use both Context and Depends.""" + + def get_prefix() -> str: + return "DATA" + + @mcp.resource("config://info") + async def get_info(ctx: Context, prefix: str = Depends(get_prefix)) -> str: + return f"{prefix}: Request {ctx.request_id}" + + async with Client(mcp) as client: + result = await client.read_resource("config://info") + assert len(result) == 1 + content = result[0] + assert isinstance(content, TextResourceContents) + assert "DATA: Request " in content.text + assert len(content.text.split("Request ")[1]) > 0 + + +async def test_prompt_with_dependency(mcp: FastMCP): + """Test that prompts support dependency injection.""" + + def get_tone() -> str: + return "friendly and helpful" + + @mcp.prompt() + async def custom_prompt(topic: str, tone: str = Depends(get_tone)) -> str: + return f"Write about {topic} in a {tone} tone" + + async with Client(mcp) as client: + result = await client.get_prompt("custom_prompt", {"topic": "Python"}) + assert len(result.messages) == 1 + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + assert content.text == "Write about Python in a friendly and helpful tone" + + +async def test_prompt_with_context_and_dependency(mcp: FastMCP): + """Test that prompts can use both Context and Depends.""" + + def get_style() -> str: + return "concise" + + @mcp.prompt() + async def styled_prompt( + query: str, ctx: Context, style: str = Depends(get_style) + ) -> str: + assert isinstance(ctx, Context) + assert ctx.request_id + return f"Answer '{query}' in a {style} style" + + async with Client(mcp) as client: + result = await client.get_prompt("styled_prompt", {"query": "What is MCP?"}) + assert 
len(result.messages) == 1 + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + assert content.text == "Answer 'What is MCP?' in a concise style" + + +async def test_resource_template_with_dependency(mcp: FastMCP): + """Test that resource templates support dependency injection.""" + + def get_base_path() -> str: + return "/var/data" + + @mcp.resource("data://{filename}") + async def get_file(filename: str, base_path: str = Depends(get_base_path)) -> str: + return f"Reading {base_path}/{filename}" + + async with Client(mcp) as client: + result = await client.read_resource("data://config.txt") + assert len(result) == 1 + content = result[0] + assert isinstance(content, TextResourceContents) + assert content.text == "Reading /var/data/config.txt" + + +async def test_resource_template_with_context_and_dependency(mcp: FastMCP): + """Test that resource templates can use both Context and Depends.""" + + def get_version() -> str: + return "v2" + + @mcp.resource("api://{endpoint}") + async def call_endpoint( + endpoint: str, ctx: Context, version: str = Depends(get_version) + ) -> str: + assert isinstance(ctx, Context) + assert ctx.request_id + return f"Calling {version}/{endpoint}" + + async with Client(mcp) as client: + result = await client.read_resource("api://users") + assert len(result) == 1 + content = result[0] + assert isinstance(content, TextResourceContents) + assert content.text == "Calling v2/users" + + +async def test_async_tool_context_manager_stays_open(mcp: FastMCP): + """Test that context manager dependencies stay open during async tool execution. + + Context managers must remain open while the async function executes, not just + while it's being called (which only returns a coroutine). 
+ """ + + @mcp.tool() + async def query_data( + query: str, connection: Connection = Depends(get_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open}" + + async with Client(mcp) as client: + result = await client.call_tool("query_data", {"query": "test"}) + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "open=True" + + +async def test_async_resource_context_manager_stays_open(mcp: FastMCP): + """Test that context manager dependencies stay open during async resource execution.""" + + @mcp.resource("data://config") + async def load_config(connection: Connection = Depends(get_connection)) -> str: + assert connection.is_open + return f"open={connection.is_open}" + + async with Client(mcp) as client: + result = await client.read_resource("data://config") + content = result[0] + assert isinstance(content, TextResourceContents) + assert content.text == "open=True" + + +async def test_async_resource_template_context_manager_stays_open(mcp: FastMCP): + """Test that context manager dependencies stay open during async resource template execution.""" + + @mcp.resource("user://{user_id}") + async def get_user( + user_id: str, connection: Connection = Depends(get_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open},user={user_id}" + + async with Client(mcp) as client: + result = await client.read_resource("user://123") + content = result[0] + assert isinstance(content, TextResourceContents) + assert "open=True" in content.text + + +async def test_async_prompt_context_manager_stays_open(mcp: FastMCP): + """Test that context manager dependencies stay open during async prompt execution.""" + + @mcp.prompt() + async def research_prompt( + topic: str, connection: Connection = Depends(get_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open},topic={topic}" + + async with Client(mcp) as client: + result = await 
client.get_prompt("research_prompt", {"topic": "AI"}) + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + assert "open=True" in content.text + + +async def test_argument_validation_with_dependencies(mcp: FastMCP): + """Test that user arguments are still validated when dependencies are present.""" + + def get_config() -> dict[str, str]: + return {"key": "value"} + + @mcp.tool() + async def validated_tool( + age: int, # Should validate type + config: dict[str, str] = Depends(get_config), + ) -> str: + return f"age={age}" + + async with Client(mcp) as client: + # Valid argument + result = await client.call_tool("validated_tool", {"age": 25}) + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "age=25" + + # Invalid argument type should fail validation + with pytest.raises(Exception): # Will be ToolError wrapping validation error + await client.call_tool("validated_tool", {"age": "not a number"}) + + +async def test_connection_dependency_excluded_from_tool_schema(mcp: FastMCP): + """Test that Connection dependency parameter is excluded from tool schema.""" + + @mcp.tool() + async def with_connection( + name: str, connection: Connection = Depends(get_connection) + ) -> str: + return name + + tools = await mcp._list_tools_mcp() + tool = next(t for t in tools if t.name == "with_connection") + + assert "name" in tool.inputSchema["properties"] + assert "connection" not in tool.inputSchema["properties"] + + +async def test_sync_tool_context_manager_stays_open(mcp: FastMCP): + """Test that sync context manager dependencies work with tools.""" + conn = Connection() + + @contextmanager + def get_sync_connection(): + conn.is_open = True + try: + yield conn + finally: + conn.is_open = False + + @mcp.tool() + async def query_sync( + query: str, connection: Connection = Depends(get_sync_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open}" + + async 
with Client(mcp) as client: + result = await client.call_tool("query_sync", {"query": "test"}) + content = result.content[0] + assert isinstance(content, TextContent) + assert content.text == "open=True" + assert not conn.is_open + + +async def test_sync_resource_context_manager_stays_open(mcp: FastMCP): + """Test that sync context manager dependencies work with resources.""" + conn = Connection() + + @contextmanager + def get_sync_connection(): + conn.is_open = True + try: + yield conn + finally: + conn.is_open = False + + @mcp.resource("data://sync") + async def load_sync(connection: Connection = Depends(get_sync_connection)) -> str: + assert connection.is_open + return f"open={connection.is_open}" + + async with Client(mcp) as client: + result = await client.read_resource("data://sync") + content = result[0] + assert isinstance(content, TextResourceContents) + assert content.text == "open=True" + assert not conn.is_open + + +async def test_sync_resource_template_context_manager_stays_open(mcp: FastMCP): + """Test that sync context manager dependencies work with resource templates.""" + conn = Connection() + + @contextmanager + def get_sync_connection(): + conn.is_open = True + try: + yield conn + finally: + conn.is_open = False + + @mcp.resource("item://{item_id}") + async def get_item( + item_id: str, connection: Connection = Depends(get_sync_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open},item={item_id}" + + async with Client(mcp) as client: + result = await client.read_resource("item://456") + content = result[0] + assert isinstance(content, TextResourceContents) + assert "open=True" in content.text + assert not conn.is_open + + +async def test_sync_prompt_context_manager_stays_open(mcp: FastMCP): + """Test that sync context manager dependencies work with prompts.""" + conn = Connection() + + @contextmanager + def get_sync_connection(): + conn.is_open = True + try: + yield conn + finally: + conn.is_open = False + + 
@mcp.prompt() + async def sync_prompt( + topic: str, connection: Connection = Depends(get_sync_connection) + ) -> str: + assert connection.is_open + return f"open={connection.is_open},topic={topic}" + + async with Client(mcp) as client: + result = await client.get_prompt("sync_prompt", {"topic": "test"}) + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + assert "open=True" in content.text + assert not conn.is_open + + +async def test_external_user_cannot_override_dependency(mcp: FastMCP): + """Test that external MCP clients cannot override dependency parameters.""" + + def get_admin_status() -> str: + return "not_admin" + + @mcp.tool() + async def check_permission( + action: str, admin: str = Depends(get_admin_status) + ) -> str: + return f"action={action},admin={admin}" + + # Verify dependency is NOT in the schema + tools = await mcp._list_tools_mcp() + tool = next(t for t in tools if t.name == "check_permission") + assert "admin" not in tool.inputSchema["properties"] + + async with Client(mcp) as client: + # Normal call - dependency is resolved + result = await client.call_tool("check_permission", {"action": "read"}) + content = result.content[0] + assert isinstance(content, TextContent) + assert "admin=not_admin" in content.text + + # Try to override dependency - rejected (not in schema) + with pytest.raises(Exception): + await client.call_tool( + "check_permission", {"action": "read", "admin": "hacker"} + ) + + +async def test_prompt_dependency_cannot_be_overridden_externally(mcp: FastMCP): + """Test that external callers cannot override prompt dependencies. + + This is a security test - dependencies should NEVER be overridable from + outside the server, even for prompts which don't validate against strict schemas. 
+ """ + + def get_secret() -> str: + return "real_secret" + + @mcp.prompt() + async def secure_prompt(topic: str, secret: str = Depends(get_secret)) -> str: + return f"Topic: {topic}, Secret: {secret}" + + async with Client(mcp) as client: + # Normal call - should use dependency + result = await client.get_prompt("secure_prompt", {"topic": "test"}) + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + assert "Secret: real_secret" in content.text + + # Try to override dependency - should be ignored/rejected + result = await client.get_prompt( + "secure_prompt", + {"topic": "test", "secret": "HACKED"}, # Attempt override + ) + message = result.messages[0] + content = message.content + assert isinstance(content, TextContent) + # Should still use real dependency, not hacked value + assert "Secret: real_secret" in content.text + assert "HACKED" not in content.text + + +async def test_resource_dependency_cannot_be_overridden_externally(mcp: FastMCP): + """Test that external callers cannot override resource dependencies.""" + + def get_api_key() -> str: + return "real_api_key" + + @mcp.resource("data://config") + async def get_config(api_key: str = Depends(get_api_key)) -> str: + return f"API Key: {api_key}" + + async with Client(mcp) as client: + # Normal call + result = await client.read_resource("data://config") + content = result[0] + assert isinstance(content, TextResourceContents) + assert "API Key: real_api_key" in content.text + + # Resources don't accept arguments from clients (static URI) + # so this scenario is less of a concern, but documenting it + + +async def test_resource_template_dependency_cannot_be_overridden_externally( + mcp: FastMCP, +): + """Test that external callers cannot override resource template dependencies. + + Resource templates extract parameters from the URI path, so there's a risk + that a dependency parameter name could match a URI parameter. 
+ """ + + def get_auth_token() -> str: + return "real_token" + + @mcp.resource("user://{user_id}") + async def get_user(user_id: str, token: str = Depends(get_auth_token)) -> str: + return f"User: {user_id}, Token: {token}" + + async with Client(mcp) as client: + # Normal call + result = await client.read_resource("user://123") + content = result[0] + assert isinstance(content, TextResourceContents) + assert "User: 123, Token: real_token" in content.text + + # Try to inject token via URI (shouldn't be possible with this pattern) + # But if URI was user://{token}, it could extract it + + +async def test_resource_template_uri_cannot_match_dependency_name(mcp: FastMCP): + """Test that URI parameters cannot have the same name as dependencies. + + If a URI template tries to use a parameter name that's also a dependency, + the template creation should fail because the dependency is excluded from + the user-facing signature. + """ + + def get_token() -> str: + return "real_token" + + # This should fail - {token} in URI but token is a dependency parameter + with pytest.raises(ValueError, match="URI parameters.*must be a subset"): + + @mcp.resource("auth://{token}/validate") + async def validate(token: str = Depends(get_token)) -> str: + return f"Validating with: {token}" diff --git a/tests/server/test_server_docket.py b/tests/server/test_server_docket.py new file mode 100644 index 0000000000..442d725178 --- /dev/null +++ b/tests/server/test_server_docket.py @@ -0,0 +1,193 @@ +"""Tests for Docket integration in FastMCP.""" + +import asyncio +from contextlib import asynccontextmanager + +import pytest +from docket import Docket +from docket.worker import Worker + +from fastmcp import FastMCP +from fastmcp.client import Client +from fastmcp.dependencies import CurrentDocket, CurrentWorker +from fastmcp.exceptions import ToolError +from fastmcp.server.dependencies import get_context +from fastmcp.utilities.tests import temporary_settings + +HUZZAH = "huzzah!" 
+ + +@pytest.fixture(autouse=True) +def enable_docket(): + """Enable Docket support for all tests in this suite.""" + with temporary_settings(enable_docket=True): + yield + + +async def test_docket_disabled(): + """Verify that Docket errors when flag is disabled.""" + with temporary_settings(enable_docket=False): + mcp = FastMCP("test-server") + + @mcp.tool() + def needs_docket(docket: Docket = CurrentDocket()) -> str: + return f"Got docket: {type(docket).__name__}" + + async with Client(mcp) as client: + with pytest.raises(ToolError, match="Failed to resolve dependency"): + await client.call_tool("needs_docket", {}) + + +async def test_current_docket_with_flag_enabled(): + """CurrentDocket dependency works when experimental flag is enabled.""" + mcp = FastMCP("test-server") + + @mcp.tool() + def check_docket(docket: Docket = CurrentDocket()) -> str: + assert isinstance(docket, Docket) + return HUZZAH + + async with Client(mcp) as client: + result = await client.call_tool("check_docket", {}) + assert HUZZAH in str(result) + + +async def test_current_worker_with_flag_enabled(): + """CurrentWorker dependency works when experimental flag is enabled.""" + mcp = FastMCP("test-server") + + @mcp.tool() + def check_worker( + worker: Worker = CurrentWorker(), + docket: Docket = CurrentDocket(), + ) -> str: + assert isinstance(worker, Worker) + assert worker.docket is docket + return HUZZAH + + async with Client(mcp) as client: + result = await client.call_tool("check_worker", {}) + assert HUZZAH in str(result) + + +async def test_worker_executes_background_tasks(): + """Verify that the Docket Worker is running and executes tasks.""" + task_completed = asyncio.Event() + mcp = FastMCP("test-server") + + @mcp.tool() + async def schedule_work( + task_name: str, + docket: Docket = CurrentDocket(), + ) -> str: + """Schedule a background task.""" + + async def background_task(name: str): + """Simple background task that signals completion.""" + task_completed.set() + + # Schedule 
the task (Worker running in background will execute it) + await docket.add(background_task)(task_name) + + return f"Scheduled {task_name}" + + async with Client(mcp) as client: + result = await client.call_tool("schedule_work", {"task_name": "test-task"}) + assert "Scheduled test-task" in str(result) + + # Wait for background task to execute (max 2 seconds) + await asyncio.wait_for(task_completed.wait(), timeout=2.0) + + +async def test_current_docket_in_resource(): + """CurrentDocket works in resources when flag is enabled.""" + mcp = FastMCP("test-server") + + @mcp.resource("docket://info") + def get_docket_info(docket: Docket = CurrentDocket()) -> str: + assert isinstance(docket, Docket) + return HUZZAH + + async with Client(mcp) as client: + result = await client.read_resource("docket://info") + assert HUZZAH in str(result) + + +async def test_current_docket_in_prompt(): + """CurrentDocket works in prompts when flag is enabled.""" + mcp = FastMCP("test-server") + + @mcp.prompt() + def task_prompt(task_type: str, docket: Docket = CurrentDocket()) -> str: + assert isinstance(docket, Docket) + return HUZZAH + + async with Client(mcp) as client: + result = await client.get_prompt("task_prompt", {"task_type": "background"}) + assert HUZZAH in str(result) + + +async def test_current_docket_in_resource_template(): + """CurrentDocket works in resource templates when flag is enabled.""" + mcp = FastMCP("test-server") + + @mcp.resource("docket://tasks/{task_id}") + def get_task_status(task_id: str, docket: Docket = CurrentDocket()) -> str: + assert isinstance(docket, Docket) + return HUZZAH + + async with Client(mcp) as client: + result = await client.read_resource("docket://tasks/123") + assert HUZZAH in str(result) + + +async def test_concurrent_calls_maintain_isolation(): + """Multiple concurrent calls each get the same Docket instance.""" + mcp = FastMCP("test-server") + docket_ids = [] + + @mcp.tool() + def capture_docket_id(call_num: int, docket: Docket = 
CurrentDocket()) -> str: + docket_ids.append((call_num, id(docket))) + return HUZZAH + + async with Client(mcp) as client: + results = await asyncio.gather( + client.call_tool("capture_docket_id", {"call_num": 1}), + client.call_tool("capture_docket_id", {"call_num": 2}), + client.call_tool("capture_docket_id", {"call_num": 3}), + ) + + for result in results: + assert HUZZAH in str(result) + + # All calls should see the same Docket instance + assert len(docket_ids) == 3 + first_id = docket_ids[0][1] + assert all(docket_id == first_id for _, docket_id in docket_ids) + + +async def test_user_lifespan_still_works_with_docket(): + """User-provided lifespan works correctly alongside Docket.""" + lifespan_entered = False + + @asynccontextmanager + async def custom_lifespan(server: FastMCP): + nonlocal lifespan_entered + lifespan_entered = True + yield {"custom_data": "test_value"} + + mcp = FastMCP("test-server", lifespan=custom_lifespan) + + @mcp.tool() + def check_both(docket: Docket = CurrentDocket()) -> str: + assert isinstance(docket, Docket) + ctx = get_context() + lifespan_data = ctx.request_context.lifespan_context + assert lifespan_data.get("custom_data") == "test_value" + return HUZZAH + + async with Client(mcp) as client: + assert lifespan_entered + result = await client.call_tool("check_both", {}) + assert HUZZAH in str(result) diff --git a/tests/server/test_tool_annotations.py b/tests/server/test_tool_annotations.py index cbe47465ba..8603813282 100644 --- a/tests/server/test_tool_annotations.py +++ b/tests/server/test_tool_annotations.py @@ -219,3 +219,37 @@ def create_item(name: str, value: int) -> dict[str, Any]: "create_item", {"name": "test_item", "value": 42} ) assert result.data == {"name": "test_item", "value": 42} + + +async def test_task_execution_auto_populated_for_task_enabled_tool(): + """Test that execution.task is automatically set when tool has task=True.""" + mcp = FastMCP("Test Server") + + @mcp.tool(task=True) + async def 
background_tool(data: str) -> str: + """A tool that runs in background.""" + return f"Processed: {data}" + + async with Client(mcp) as client: + tools_result = await client.list_tools() + assert len(tools_result) == 1 + assert tools_result[0].name == "background_tool" + assert tools_result[0].execution is not None + assert tools_result[0].execution.task == "optional" + + +async def test_task_execution_omitted_for_task_disabled_tool(): + """Test that execution is not set when tool has task=False or default.""" + mcp = FastMCP("Test Server") + + @mcp.tool(task=False) + def sync_tool(data: str) -> str: + """A synchronous tool.""" + return f"Processed: {data}" + + async with Client(mcp) as client: + tools_result = await client.list_tools() + assert len(tools_result) == 1 + assert tools_result[0].name == "sync_tool" + # execution should be None for non-task tools (default is False, omitted) + assert tools_result[0].execution is None diff --git a/tests/tools/test_tool.py b/tests/tools/test_tool.py index f47df7a1af..c707bf7999 100644 --- a/tests/tools/test_tool.py +++ b/tests/tools/test_tool.py @@ -52,6 +52,7 @@ def add(a: int, b: int) -> int: "type": "object", "x-fastmcp-wrap-result": True, }, + "task": False, "fn": HasName("add"), } ) @@ -99,6 +100,7 @@ async def fetch_data(url: str) -> str: "type": "object", "x-fastmcp-wrap-result": True, }, + "task": False, "fn": HasName("fetch_data"), } ) @@ -133,6 +135,7 @@ def __call__(self, x: int, y: int) -> int: "type": "object", "x-fastmcp-wrap-result": True, }, + "task": False, } ) @@ -166,6 +169,7 @@ async def __call__(self, x: int, y: int) -> int: "type": "object", "x-fastmcp-wrap-result": True, }, + "task": False, } ) @@ -207,6 +211,7 @@ def create_user(user: UserInput, flag: bool) -> dict: "type": "object", }, "output_schema": {"additionalProperties": True, "type": "object"}, + "task": False, "fn": HasName("create_user"), } ) @@ -269,6 +274,7 @@ def test_lambda(self): "required": ["x"], "type": "object", }, + "task": 
False, } ) @@ -301,6 +307,7 @@ def add(_a: int, _b: int) -> int: "required": ["_a", "_b"], "type": "object", }, + "task": False, } ) @@ -355,6 +362,7 @@ def add(self, x: int, y: int) -> int: "type": "object", "x-fastmcp-wrap-result": True, }, + "task": False, } ) diff --git a/tests/utilities/test_types.py b/tests/utilities/test_types.py index 0758a1dbe0..3b74c06966 100644 --- a/tests/utilities/test_types.py +++ b/tests/utilities/test_types.py @@ -1,8 +1,5 @@ import base64 import os -import tempfile -from pathlib import Path -from types import EllipsisType from typing import Annotated, Any import pytest @@ -13,7 +10,6 @@ Audio, File, Image, - find_kwarg_by_type, get_cached_typeadapter, is_class_member_of_type, issubclass_safe, @@ -143,14 +139,15 @@ def test_image_path_expansion_with_tilde(self): assert not str(image.path).startswith("~") assert str(image.path).startswith(os.path.expanduser("~")) - def test_image_path_expansion_with_env_var(self, monkeypatch): + def test_image_path_expansion_with_env_var(self, monkeypatch, tmp_path): """Test that environment variables are expanded.""" - test_dir = tempfile.mkdtemp() - monkeypatch.setenv("TEST_PATH", test_dir) + test_dir = tmp_path / "test_path" + test_dir.mkdir() + monkeypatch.setenv("TEST_PATH", str(test_dir)) image = Image(path="$TEST_PATH/test.png") assert image.path is not None assert not str(image.path).startswith("$TEST_PATH") - expected_path = Path(test_dir) / "test.png" + expected_path = test_dir / "test.png" assert image.path == expected_path def test_image_initialization_with_data(self): @@ -266,14 +263,15 @@ def test_audio_path_expansion_with_tilde(self): assert not str(audio.path).startswith("~") assert str(audio.path).startswith(os.path.expanduser("~")) - def test_audio_path_expansion_with_env_var(self, monkeypatch): + def test_audio_path_expansion_with_env_var(self, monkeypatch, tmp_path): """Test that environment variables are expanded.""" - test_dir = tempfile.mkdtemp() - 
monkeypatch.setenv("TEST_AUDIO_PATH", test_dir) + test_dir = tmp_path / "test_audio_path" + test_dir.mkdir() + monkeypatch.setenv("TEST_AUDIO_PATH", str(test_dir)) audio = Audio(path="$TEST_AUDIO_PATH/test.wav") assert audio.path is not None assert not str(audio.path).startswith("$TEST_AUDIO_PATH") - expected_path = Path(test_dir) / "test.wav" + expected_path = test_dir / "test.wav" assert audio.path == expected_path def test_audio_initialization_with_data(self): @@ -380,14 +378,15 @@ def test_file_path_expansion_with_tilde(self): assert not str(file.path).startswith("~") assert str(file.path).startswith(os.path.expanduser("~")) - def test_file_path_expansion_with_env_var(self, monkeypatch): + def test_file_path_expansion_with_env_var(self, monkeypatch, tmp_path): """Test that environment variables are expanded.""" - test_dir = tempfile.mkdtemp() - monkeypatch.setenv("TEST_FILE_PATH", test_dir) + test_dir = tmp_path / "test_file_path" + test_dir.mkdir() + monkeypatch.setenv("TEST_FILE_PATH", str(test_dir)) file = File(path="$TEST_FILE_PATH/test.txt") assert file.path is not None assert not str(file.path).startswith("$TEST_FILE_PATH") - expected_path = Path(test_dir) / "test.txt" + expected_path = test_dir / "test.txt" assert file.path == expected_path def test_file_initialization_with_data(self): @@ -493,130 +492,6 @@ def test_to_resource_content_with_override_mime_type(self, tmp_path): assert resource.resource.mimeType == "application/custom" -class TestFindKwargByType: - def test_exact_type_match(self): - """Test finding parameter with exact type match.""" - - def func(a: int, b: str, c: BaseClass): - pass - - assert find_kwarg_by_type(func, BaseClass) == "c" - - def test_no_matching_parameter(self): - """Test finding parameter when no match exists.""" - - def func(a: int, b: str, c: OtherClass): - pass - - assert find_kwarg_by_type(func, BaseClass) is None - - def test_parameter_with_no_annotation(self): - """Test with a parameter that has no type annotation.""" 
- - def func(a: int, b, c: BaseClass): - pass - - assert find_kwarg_by_type(func, BaseClass) == "c" - - def test_union_type_match_pipe_syntax(self): - """Test finding parameter with union type using pipe syntax.""" - - def func(a: int, b: str | BaseClass, c: str): - pass - - assert find_kwarg_by_type(func, BaseClass) == "b" - - def test_union_type_match_typing_union(self): - """Test finding parameter with union type using Union.""" - - def func(a: int, b: str | BaseClass, c: str): - pass - - assert find_kwarg_by_type(func, BaseClass) == "b" - - def test_annotated_type_match(self): - """Test finding parameter with Annotated type.""" - - def func(a: int, b: Annotated[BaseClass, "metadata"], c: str): - pass - - assert find_kwarg_by_type(func, BaseClass) == "b" - - def test_method_parameter(self): - """Test finding parameter in a class method.""" - - class TestClass: - def method(self, a: int, b: BaseClass): - pass - - instance = TestClass() - assert find_kwarg_by_type(instance.method, BaseClass) == "b" - - def test_static_method_parameter(self): - """Test finding parameter in a static method.""" - - class TestClass: - @staticmethod - def static_method(a: int, b: BaseClass, c: str): - pass - - assert find_kwarg_by_type(TestClass.static_method, BaseClass) == "b" - - def test_class_method_parameter(self): - """Test finding parameter in a class method.""" - - class TestClass: - @classmethod - def class_method(cls, a: int, b: BaseClass, c: str): - pass - - assert find_kwarg_by_type(TestClass.class_method, BaseClass) == "b" - - def test_multiple_matching_parameters(self): - """Test finding first parameter when multiple matches exist.""" - - def func(a: BaseClass, b: str, c: BaseClass): - pass - - # Should return the first match - assert find_kwarg_by_type(func, BaseClass) == "a" - - def test_subclass_match(self): - """Test finding parameter with a subclass of the target type.""" - - def func(a: int, b: ChildClass, c: str): - pass - - assert find_kwarg_by_type(func, 
BaseClass) == "b" - - def test_nonstandard_annotation(self): - """Test finding parameter with a nonstandard annotation like an - instance. This is irregular.""" - - SENTINEL = object() - - def func(a: int, b: SENTINEL, c: str): # type: ignore - pass - - assert find_kwarg_by_type(func, SENTINEL) is None # type: ignore - - def test_ellipsis_annotation(self): - """Test finding parameter with an ellipsis annotation.""" - - def func(a: int, b: EllipsisType, c: str): # type: ignore # noqa: F821 - pass - - assert find_kwarg_by_type(func, EllipsisType) == "b" # type: ignore - - def test_missing_type_annotation(self): - """Test finding parameter with a missing type annotation.""" - - def func(a: int, b, c: str): - pass - - assert find_kwarg_by_type(func, str) == "c" - - class TestReplaceType: @pytest.mark.parametrize( "input,type_map,expected", diff --git a/uv.lock b/uv.lock index 808099901d..6ce8d743b8 100644 --- a/uv.lock +++ b/uv.lock @@ -39,6 +39,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" }, ] +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + [[package]] name = 
"attrs" version = "25.3.0" @@ -69,6 +78,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, ] +[[package]] +name = "backports-tarfile" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/86/72/cd9b395f25e290e633655a100af28cb253e4393396264a98bd5f5951d50f/backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991", size = 86406, upload-time = "2024-05-28T17:01:54.731Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/fa/123043af240e49752f1c4bd24da5053b6bd00cad78c2be53c0d1e8b975bc/backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34", size = 30181, upload-time = "2024-05-28T17:01:53.112Z" }, +] + [[package]] name = "beartype" version = "0.22.2" @@ -254,6 +272,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, ] +[[package]] +name = "cloudpickle" +version = "3.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414", size = 22330, upload-time = "2025-11-03T09:25:26.604Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a", size = 22228, upload-time = "2025-11-03T09:25:25.534Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -523,6 +550,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702, upload-time = "2025-01-22T15:41:25.929Z" }, ] +[[package]] +name = "fakeredis" +version = "2.32.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "redis" }, + { name = "sortedcontainers" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/14/b47b8471303af7deed7080290c14cff27a831fa47b38f45643e6bf889cee/fakeredis-2.32.1.tar.gz", hash = "sha256:dd8246db159f0b66a1ced7800c9d5ef07769e3d2fde44b389a57f2ce2834e444", size = 171582, upload-time = "2025-11-06T01:40:57.836Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/d2/c28f6909864bfdb7411bb8f39fabedb5a50da1cbd7da5a1a3a46dfea2eab/fakeredis-2.32.1-py3-none-any.whl", hash = "sha256:e80c8886db2e47ba784f7dfe66aad6cd2eab76093c6bfda50041e5bc890d46cf", size = 118964, upload-time = "2025-11-06T01:40:55.885Z" }, +] + +[package.optional-dependencies] +lua = [ + { name = "lupa" }, +] + [[package]] name = "fancycompleter" version = "0.11.1" @@ -562,8 +608,9 @@ dependencies = [ { name = "mcp" }, { name = "openapi-pydantic" }, { name = "platformdirs" }, - { name = "py-key-value-aio", extra = ["disk", "memory"] }, + { name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] }, { name = "pydantic", extra = ["email"] }, + { name = "pydocket" }, { name = "pyperclip" }, { name = 
"python-dotenv" }, { name = "rich" }, @@ -614,8 +661,9 @@ requires-dist = [ { name = "openai", marker = "extra == 'openai'", specifier = ">=1.102.0" }, { name = "openapi-pydantic", specifier = ">=0.5.1" }, { name = "platformdirs", specifier = ">=4.0.0" }, - { name = "py-key-value-aio", extras = ["disk", "memory"], specifier = ">=0.2.8,<0.4.0" }, + { name = "py-key-value-aio", extras = ["disk", "keyring", "memory"], specifier = ">=0.3.0,<0.4.0" }, { name = "pydantic", extras = ["email"], specifier = ">=2.11.7" }, + { name = "pydocket", specifier = ">=0.14.0" }, { name = "pyperclip", specifier = ">=1.9.0" }, { name = "python-dotenv", specifier = ">=1.1.0" }, { name = "rich", specifier = ">=13.9.4" }, @@ -705,6 +753,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -797,6 +857,42 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, ] +[[package]] +name = "jaraco-classes" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" }, +] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-tarfile", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" }, +] + +[[package]] +name = "jaraco-functools" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "more-itertools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/ed/1aa2d585304ec07262e1a83a9889880701079dde796ac7b1d1826f40c63d/jaraco_functools-4.3.0.tar.gz", hash = "sha256:cfd13ad0dd2c47a3600b439ef72d8615d482cedcff1632930d6f28924d92f294", size = 19755, upload-time = "2025-08-18T20:05:09.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/09/726f168acad366b11e420df31bf1c702a54d373a83f968d94141a8c3fde0/jaraco_functools-4.3.0-py3-none-any.whl", hash = "sha256:227ff8ed6f7b8f62c56deff101545fa7543cf2c8e7b82a7c2116e672f29c26e8", size = 10408, upload-time = "2025-08-18T20:05:08.69Z" }, +] + [[package]] name = "jedi" version = "0.19.2" @@ -809,6 +905,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, ] +[[package]] +name = "jeepney" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" }, +] + [[package]] name = "jiter" version = "0.10.0" @@ -923,6 +1028,98 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = 
"sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, ] +[[package]] +name = "keyring" +version = "25.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.12'" }, + { name = "jaraco-classes" }, + { name = "jaraco-context" }, + { name = "jaraco-functools" }, + { name = "jeepney", marker = "sys_platform == 'linux'" }, + { name = "pywin32-ctypes", marker = "sys_platform == 'win32'" }, + { name = "secretstorage", marker = "sys_platform == 'linux'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/674af6ef2f97d56f0ab5153bf0bfa28ccb6c3ed4d1babf4305449668807b/keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b", size = 63516, upload-time = "2025-11-16T16:26:09.482Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, +] + +[[package]] +name = "lupa" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/1c/191c3e6ec6502e3dbe25a53e27f69a5daeac3e56de1f73c0138224171ead/lupa-2.6.tar.gz", hash = "sha256:9a770a6e89576be3447668d7ced312cd6fd41d3c13c2462c9dc2c2ab570e45d9", size = 7240282, upload-time = "2025-10-24T07:20:29.738Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/15/713cab5d0dfa4858f83b99b3e0329072df33dc14fc3ebbaa017e0f9755c4/lupa-2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b3dabda836317e63c5ad052826e156610f356a04b3003dfa0dbe66b5d54d671", size = 954828, upload-time = "2025-10-24T07:17:15.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/71/704740cbc6e587dd6cc8dabf2f04820ac6a671784e57cc3c29db795476db/lupa-2.6-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8726d1c123bbe9fbb974ce29825e94121824e66003038ff4532c14cc2ed0c51c", size = 1919259, upload-time = "2025-10-24T07:17:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/eb/18/f248341c423c5d48837e35584c6c3eb4acab7e722b6057d7b3e28e42dae8/lupa-2.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f4e159e7d814171199b246f9235ca8961f6461ea8c1165ab428afa13c9289a94", size = 984998, upload-time = "2025-10-24T07:17:20.428Z" }, + { url = "https://files.pythonhosted.org/packages/44/1e/8a4bd471e018aad76bcb9455d298c2c96d82eced20f2ae8fcec8cd800948/lupa-2.6-cp310-cp310-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:202160e80dbfddfb79316692a563d843b767e0f6787bbd1c455f9d54052efa6c", size = 1174871, upload-time = "2025-10-24T07:17:22.755Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5c/3a3f23fd6a91b0986eea1ceaf82ad3f9b958fe3515a9981fb9c4eb046c8b/lupa-2.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5deede7c5b36ab64f869dae4831720428b67955b0bb186c8349cf6ea121c852b", size = 1057471, upload-time = "2025-10-24T07:17:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/45/ac/01be1fed778fb0c8f46ee8cbe344e4d782f6806fac12717f08af87aa4355/lupa-2.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86f04901f920bbf7c0cac56807dc9597e42347123e6f1f3ca920f15f54188ce5", size = 2100592, upload-time = "2025-10-24T07:17:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/3f/6c/1a05bb873e30830f8574e10cd0b4cdbc72e9dbad2a09e25810b5e3b1f75d/lupa-2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6deef8f851d6afb965c84849aa5b8c38856942df54597a811ce0369ced678610", size = 1081396, upload-time = "2025-10-24T07:17:29.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/c2/a19dd80d6dc98b39bbf8135b8198e38aa7ca3360b720eac68d1d7e9286b5/lupa-2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:21f2b5549681c2a13b1170a26159d30875d367d28f0247b81ca347222c755038", size = 1192007, upload-time = "2025-10-24T07:17:31.362Z" }, + { url = "https://files.pythonhosted.org/packages/4f/43/e1b297225c827f55752e46fdbfb021c8982081b0f24490e42776ea69ae3b/lupa-2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:66eea57630eab5e6f49fdc5d7811c0a2a41f2011be4ea56a087ea76112011eb7", size = 2196661, upload-time = "2025-10-24T07:17:33.484Z" }, + { url = "https://files.pythonhosted.org/packages/2e/8f/2272d429a7fa9dc8dbd6e9c5c9073a03af6007eb22a4c78829fec6a34b80/lupa-2.6-cp310-cp310-win32.whl", hash = "sha256:60a403de8cab262a4fe813085dd77010effa6e2eb1886db2181df803140533b1", size = 1412738, upload-time = "2025-10-24T07:17:35.11Z" }, + { url = "https://files.pythonhosted.org/packages/35/2a/1708911271dd49ad87b4b373b5a4b0e0a0516d3d2af7b76355946c7ee171/lupa-2.6-cp310-cp310-win_amd64.whl", hash = "sha256:e4656a39d93dfa947cf3db56dc16c7916cb0cc8024acd3a952071263f675df64", size = 1656898, upload-time = "2025-10-24T07:17:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/ca/29/1f66907c1ebf1881735afa695e646762c674f00738ebf66d795d59fc0665/lupa-2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d988c0f9331b9f2a5a55186701a25444ab10a1432a1021ee58011499ecbbdd5", size = 962875, upload-time = "2025-10-24T07:17:39.107Z" }, + { url = "https://files.pythonhosted.org/packages/e6/67/4a748604be360eb9c1c215f6a0da921cd1a2b44b2c5951aae6fb83019d3a/lupa-2.6-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ebe1bbf48259382c72a6fe363dea61a0fd6fe19eab95e2ae881e20f3654587bf", size = 1935390, upload-time = "2025-10-24T07:17:41.427Z" }, + { url = "https://files.pythonhosted.org/packages/ac/0c/8ef9ee933a350428b7bdb8335a37ef170ab0bb008bbf9ca8f4f4310116b6/lupa-2.6-cp311-cp311-macosx_11_0_x86_64.whl", hash = 
"sha256:a8fcee258487cf77cdd41560046843bb38c2e18989cd19671dd1e2596f798306", size = 992193, upload-time = "2025-10-24T07:17:43.231Z" }, + { url = "https://files.pythonhosted.org/packages/65/46/e6c7facebdb438db8a65ed247e56908818389c1a5abbf6a36aab14f1057d/lupa-2.6-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:561a8e3be800827884e767a694727ed8482d066e0d6edfcbf423b05e63b05535", size = 1165844, upload-time = "2025-10-24T07:17:45.437Z" }, + { url = "https://files.pythonhosted.org/packages/1c/26/9f1154c6c95f175ccbf96aa96c8f569c87f64f463b32473e839137601a8b/lupa-2.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af880a62d47991cae78b8e9905c008cbfdc4a3a9723a66310c2634fc7644578c", size = 1048069, upload-time = "2025-10-24T07:17:47.181Z" }, + { url = "https://files.pythonhosted.org/packages/68/67/2cc52ab73d6af81612b2ea24c870d3fa398443af8e2875e5befe142398b1/lupa-2.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80b22923aa4023c86c0097b235615f89d469a0c4eee0489699c494d3367c4c85", size = 2079079, upload-time = "2025-10-24T07:17:49.755Z" }, + { url = "https://files.pythonhosted.org/packages/2e/dc/f843f09bbf325f6e5ee61730cf6c3409fc78c010d968c7c78acba3019ca7/lupa-2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:153d2cc6b643f7efb9cfc0c6bb55ec784d5bac1a3660cfc5b958a7b8f38f4a75", size = 1071428, upload-time = "2025-10-24T07:17:51.991Z" }, + { url = "https://files.pythonhosted.org/packages/2e/60/37533a8d85bf004697449acb97ecdacea851acad28f2ad3803662487dd2a/lupa-2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3fa8777e16f3ded50b72967dc17e23f5a08e4f1e2c9456aff2ebdb57f5b2869f", size = 1181756, upload-time = "2025-10-24T07:17:53.752Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f2/cf29b20dbb4927b6a3d27c339ac5d73e74306ecc28c8e2c900b2794142ba/lupa-2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:8dbdcbe818c02a2f56f5ab5ce2de374dab03e84b25266cfbaef237829bc09b3f", size = 2175687, upload-time = "2025-10-24T07:17:56.228Z" }, + { url = "https://files.pythonhosted.org/packages/94/7c/050e02f80c7131b63db1474bff511e63c545b5a8636a24cbef3fc4da20b6/lupa-2.6-cp311-cp311-win32.whl", hash = "sha256:defaf188fde8f7a1e5ce3a5e6d945e533b8b8d547c11e43b96c9b7fe527f56dc", size = 1412592, upload-time = "2025-10-24T07:17:59.062Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/6f2af98aa5d771cea661f66c8eb8f53772ec1ab1dfbce24126cfcd189436/lupa-2.6-cp311-cp311-win_amd64.whl", hash = "sha256:9505ae600b5c14f3e17e70f87f88d333717f60411faca1ddc6f3e61dce85fa9e", size = 1669194, upload-time = "2025-10-24T07:18:01.647Z" }, + { url = "https://files.pythonhosted.org/packages/94/86/ce243390535c39d53ea17ccf0240815e6e457e413e40428a658ea4ee4b8d/lupa-2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47ce718817ef1cc0c40d87c3d5ae56a800d61af00fbc0fad1ca9be12df2f3b56", size = 951707, upload-time = "2025-10-24T07:18:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/cedea5e6cbeb54396fdcc55f6b741696f3f036d23cfaf986d50d680446da/lupa-2.6-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7aba985b15b101495aa4b07112cdc08baa0c545390d560ad5cfde2e9e34f4d58", size = 1916703, upload-time = "2025-10-24T07:18:05.6Z" }, + { url = "https://files.pythonhosted.org/packages/24/be/3d6b5f9a8588c01a4d88129284c726017b2089f3a3fd3ba8bd977292fea0/lupa-2.6-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:b766f62f95b2739f2248977d29b0722e589dcf4f0ccfa827ccbd29f0148bd2e5", size = 985152, upload-time = "2025-10-24T07:18:08.561Z" }, + { url = "https://files.pythonhosted.org/packages/eb/23/9f9a05beee5d5dce9deca4cb07c91c40a90541fc0a8e09db4ee670da550f/lupa-2.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:00a934c23331f94cb51760097ebfab14b005d55a6b30a2b480e3c53dd2fa290d", size = 1159599, upload-time = "2025-10-24T07:18:10.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/4e/e7c0583083db9d7f1fd023800a9767d8e4391e8330d56c2373d890ac971b/lupa-2.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21de9f38bd475303e34a042b7081aabdf50bd9bafd36ce4faea2f90fd9f15c31", size = 1038686, upload-time = "2025-10-24T07:18:12.112Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/5a4f7d959d4feba5e203ff0c31889e74d1ca3153122be4a46dca7d92bf7c/lupa-2.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf3bda96d3fc41237e964a69c23647d50d4e28421111360274d4799832c560e9", size = 2071956, upload-time = "2025-10-24T07:18:14.572Z" }, + { url = "https://files.pythonhosted.org/packages/92/34/2f4f13ca65d01169b1720176aedc4af17bc19ee834598c7292db232cb6dc/lupa-2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a76ead245da54801a81053794aa3975f213221f6542d14ec4b859ee2e7e0323", size = 1057199, upload-time = "2025-10-24T07:18:16.379Z" }, + { url = "https://files.pythonhosted.org/packages/35/2a/5f7d2eebec6993b0dcd428e0184ad71afb06a45ba13e717f6501bfed1da3/lupa-2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8dd0861741caa20886ddbda0a121d8e52fb9b5bb153d82fa9bba796962bf30e8", size = 1173693, upload-time = "2025-10-24T07:18:18.153Z" }, + { url = "https://files.pythonhosted.org/packages/e4/29/089b4d2f8e34417349af3904bb40bec40b65c8731f45e3fd8d497ca573e5/lupa-2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:239e63948b0b23023f81d9a19a395e768ed3da6a299f84e7963b8f813f6e3f9c", size = 2164394, upload-time = "2025-10-24T07:18:20.403Z" }, + { url = "https://files.pythonhosted.org/packages/f3/1b/79c17b23c921f81468a111cad843b076a17ef4b684c4a8dff32a7969c3f0/lupa-2.6-cp312-cp312-win32.whl", hash = "sha256:325894e1099499e7a6f9c351147661a2011887603c71086d36fe0f964d52d1ce", size = 1420647, upload-time = "2025-10-24T07:18:23.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/15/5121e68aad3584e26e1425a5c9a79cd898f8a152292059e128c206ee817c/lupa-2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c735a1ce8ee60edb0fe71d665f1e6b7c55c6021f1d340eb8c865952c602cd36f", size = 1688529, upload-time = "2025-10-24T07:18:25.523Z" }, + { url = "https://files.pythonhosted.org/packages/28/1d/21176b682ca5469001199d8b95fa1737e29957a3d185186e7a8b55345f2e/lupa-2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:663a6e58a0f60e7d212017d6678639ac8df0119bc13c2145029dcba084391310", size = 947232, upload-time = "2025-10-24T07:18:27.878Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4c/d327befb684660ca13cf79cd1f1d604331808f9f1b6fb6bf57832f8edf80/lupa-2.6-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f5afda5c20b1f3217a80e9bc1b77037f8a6eb11612fd3ada19065303c8f380", size = 1908625, upload-time = "2025-10-24T07:18:29.944Z" }, + { url = "https://files.pythonhosted.org/packages/66/8e/ad22b0a19454dfd08662237a84c792d6d420d36b061f239e084f29d1a4f3/lupa-2.6-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:26f2b3c085fe76e9119e48c1013c1cccdc1f51585d456858290475aa38e7089e", size = 981057, upload-time = "2025-10-24T07:18:31.553Z" }, + { url = "https://files.pythonhosted.org/packages/5c/48/74859073ab276bd0566c719f9ca0108b0cfc1956ca0d68678d117d47d155/lupa-2.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:60d2f902c7b96fb8ab98493dcff315e7bb4d0b44dc9dd76eb37de575025d5685", size = 1156227, upload-time = "2025-10-24T07:18:33.981Z" }, + { url = "https://files.pythonhosted.org/packages/09/6c/0e9ded061916877253c2266074060eb71ed99fb21d73c8c114a76725bce2/lupa-2.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a02d25dee3a3250967c36590128d9220ae02f2eda166a24279da0b481519cbff", size = 1035752, upload-time = "2025-10-24T07:18:36.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/ef/f8c32e454ef9f3fe909f6c7d57a39f950996c37a3deb7b391fec7903dab7/lupa-2.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6eae1ee16b886b8914ff292dbefbf2f48abfbdee94b33a88d1d5475e02423203", size = 2069009, upload-time = "2025-10-24T07:18:38.072Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/15b80c226a5225815a890ee1c11f07968e0aba7a852df41e8ae6fe285063/lupa-2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0edd5073a4ee74ab36f74fe61450148e6044f3952b8d21248581f3c5d1a58be", size = 1056301, upload-time = "2025-10-24T07:18:40.165Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/2086c1425c985acfb30997a67e90c39457122df41324d3c179d6ee2292c6/lupa-2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c53ee9f22a8a17e7d4266ad48e86f43771951797042dd51d1494aaa4f5f3f0a", size = 1170673, upload-time = "2025-10-24T07:18:42.426Z" }, + { url = "https://files.pythonhosted.org/packages/10/e5/b216c054cf86576c0191bf9a9f05de6f7e8e07164897d95eea0078dca9b2/lupa-2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:de7c0f157a9064a400d828789191a96da7f4ce889969a588b87ec80de9b14772", size = 2162227, upload-time = "2025-10-24T07:18:46.112Z" }, + { url = "https://files.pythonhosted.org/packages/59/2f/33ecb5bedf4f3bc297ceacb7f016ff951331d352f58e7e791589609ea306/lupa-2.6-cp313-cp313-win32.whl", hash = "sha256:ee9523941ae0a87b5b703417720c5d78f72d2f5bc23883a2ea80a949a3ed9e75", size = 1419558, upload-time = "2025-10-24T07:18:48.371Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b4/55e885834c847ea610e111d87b9ed4768f0afdaeebc00cd46810f25029f6/lupa-2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b1335a5835b0a25ebdbc75cf0bda195e54d133e4d994877ef025e218c2e59db9", size = 1683424, upload-time = "2025-10-24T07:18:50.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/9d/d9427394e54d22a35d1139ef12e845fd700d4872a67a34db32516170b746/lupa-2.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dcb6d0a3264873e1653bc188499f48c1fb4b41a779e315eba45256cfe7bc33c1", size = 953818, upload-time = "2025-10-24T07:18:53.378Z" }, + { url = "https://files.pythonhosted.org/packages/10/41/27bbe81953fb2f9ecfced5d9c99f85b37964cfaf6aa8453bb11283983721/lupa-2.6-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:a37e01f2128f8c36106726cb9d360bac087d58c54b4522b033cc5691c584db18", size = 1915850, upload-time = "2025-10-24T07:18:55.259Z" }, + { url = "https://files.pythonhosted.org/packages/a3/98/f9ff60db84a75ba8725506bbf448fb085bc77868a021998ed2a66d920568/lupa-2.6-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:458bd7e9ff3c150b245b0fcfbb9bd2593d1152ea7f0a7b91c1d185846da033fe", size = 982344, upload-time = "2025-10-24T07:18:57.05Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/f39e0f1c055c3b887d86b404aaf0ca197b5edfd235a8b81b45b25bac7fc3/lupa-2.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:052ee82cac5206a02df77119c325339acbc09f5ce66967f66a2e12a0f3211cad", size = 1156543, upload-time = "2025-10-24T07:18:59.251Z" }, + { url = "https://files.pythonhosted.org/packages/9e/9c/59e6cffa0d672d662ae17bd7ac8ecd2c89c9449dee499e3eb13ca9cd10d9/lupa-2.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96594eca3c87dd07938009e95e591e43d554c1dbd0385be03c100367141db5a8", size = 1047974, upload-time = "2025-10-24T07:19:01.449Z" }, + { url = "https://files.pythonhosted.org/packages/23/c6/a04e9cef7c052717fcb28fb63b3824802488f688391895b618e39be0f684/lupa-2.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8faddd9d198688c8884091173a088a8e920ecc96cda2ffed576a23574c4b3f6", size = 2073458, upload-time = "2025-10-24T07:19:03.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/10/824173d10f38b51fc77785228f01411b6ca28826ce27404c7c912e0e442c/lupa-2.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:daebb3a6b58095c917e76ba727ab37b27477fb926957c825205fbda431552134", size = 1067683, upload-time = "2025-10-24T07:19:06.2Z" }, + { url = "https://files.pythonhosted.org/packages/b6/dc/9692fbcf3c924d9c4ece2d8d2f724451ac2e09af0bd2a782db1cef34e799/lupa-2.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f3154e68972befe0f81564e37d8142b5d5d79931a18309226a04ec92487d4ea3", size = 1171892, upload-time = "2025-10-24T07:19:08.544Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/e318b628d4643c278c96ab3ddea07fc36b075a57383c837f5b11e537ba9d/lupa-2.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e4dadf77b9fedc0bfa53417cc28dc2278a26d4cbd95c29f8927ad4d8fe0a7ef9", size = 2166641, upload-time = "2025-10-24T07:19:10.485Z" }, + { url = "https://files.pythonhosted.org/packages/12/f7/a6f9ec2806cf2d50826980cdb4b3cffc7691dc6f95e13cc728846d5cb793/lupa-2.6-cp314-cp314-win32.whl", hash = "sha256:cb34169c6fa3bab3e8ac58ca21b8a7102f6a94b6a5d08d3636312f3f02fafd8f", size = 1456857, upload-time = "2025-10-24T07:19:37.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/de/df71896f25bdc18360fdfa3b802cd7d57d7fede41a0e9724a4625b412c85/lupa-2.6-cp314-cp314-win_amd64.whl", hash = "sha256:b74f944fe46c421e25d0f8692aef1e842192f6f7f68034201382ac440ef9ea67", size = 1731191, upload-time = "2025-10-24T07:19:40.281Z" }, + { url = "https://files.pythonhosted.org/packages/47/3c/a1f23b01c54669465f5f4c4083107d496fbe6fb45998771420e9aadcf145/lupa-2.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0e21b716408a21ab65723f8841cf7f2f37a844b7a965eeabb785e27fca4099cf", size = 999343, upload-time = "2025-10-24T07:19:12.519Z" }, + { url = "https://files.pythonhosted.org/packages/c5/6d/501994291cb640bfa2ccf7f554be4e6914afa21c4026bd01bff9ca8aac57/lupa-2.6-cp314-cp314t-macosx_11_0_universal2.whl", hash = 
"sha256:589db872a141bfff828340079bbdf3e9a31f2689f4ca0d88f97d9e8c2eae6142", size = 2000730, upload-time = "2025-10-24T07:19:14.869Z" }, + { url = "https://files.pythonhosted.org/packages/53/a5/457ffb4f3f20469956c2d4c4842a7675e884efc895b2f23d126d23e126cc/lupa-2.6-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:cd852a91a4a9d4dcbb9a58100f820a75a425703ec3e3f049055f60b8533b7953", size = 1021553, upload-time = "2025-10-24T07:19:17.123Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/36bb5a5d0960f2a5c7c700e0819abb76fd9bf9c1d8a66e5106416d6e9b14/lupa-2.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:0334753be028358922415ca97a64a3048e4ed155413fc4eaf87dd0a7e2752983", size = 1133275, upload-time = "2025-10-24T07:19:20.51Z" }, + { url = "https://files.pythonhosted.org/packages/19/86/202ff4429f663013f37d2229f6176ca9f83678a50257d70f61a0a97281bf/lupa-2.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:661d895cd38c87658a34780fac54a690ec036ead743e41b74c3fb81a9e65a6aa", size = 1038441, upload-time = "2025-10-24T07:19:22.509Z" }, + { url = "https://files.pythonhosted.org/packages/a7/42/d8125f8e420714e5b52e9c08d88b5329dfb02dcca731b4f21faaee6cc5b5/lupa-2.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aa58454ccc13878cc177c62529a2056be734da16369e451987ff92784994ca7", size = 2058324, upload-time = "2025-10-24T07:19:24.979Z" }, + { url = "https://files.pythonhosted.org/packages/2b/2c/47bf8b84059876e877a339717ddb595a4a7b0e8740bacae78ba527562e1c/lupa-2.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1425017264e470c98022bba8cff5bd46d054a827f5df6b80274f9cc71dafd24f", size = 1060250, upload-time = "2025-10-24T07:19:27.262Z" }, + { url = "https://files.pythonhosted.org/packages/c2/06/d88add2b6406ca1bdec99d11a429222837ca6d03bea42ca75afa169a78cb/lupa-2.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = 
"sha256:224af0532d216e3105f0a127410f12320f7c5f1aa0300bdf9646b8d9afb0048c", size = 1151126, upload-time = "2025-10-24T07:19:29.522Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a0/89e6a024c3b4485b89ef86881c9d55e097e7cb0bdb74efb746f2fa6a9a76/lupa-2.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9abb98d5a8fd27c8285302e82199f0e56e463066f88f619d6594a450bf269d80", size = 2153693, upload-time = "2025-10-24T07:19:31.379Z" }, + { url = "https://files.pythonhosted.org/packages/b6/36/a0f007dc58fc1bbf51fb85dcc82fcb1f21b8c4261361de7dab0e3d8521ef/lupa-2.6-cp314-cp314t-win32.whl", hash = "sha256:1849efeba7a8f6fb8aa2c13790bee988fd242ae404bd459509640eeea3d1e291", size = 1590104, upload-time = "2025-10-24T07:19:33.514Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" }, +] + [[package]] name = "markdown-it-py" version = "4.0.0" @@ -981,6 +1178,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "more-itertools" +version = "10.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = 
"sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, +] + [[package]] name = "openai" version = "2.6.1" @@ -1012,6 +1218,60 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/d8/0f354c375628e048bd0570645b310797299754730079853095bf000fba69/opentelemetry_api-1.38.0.tar.gz", hash = "sha256:f4c193b5e8acb0912b06ac5b16321908dd0843d75049c091487322284a3eea12", size = 65242, upload-time = "2025-10-16T08:35:50.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/a2/d86e01c28300bd41bab8f18afd613676e2bd63515417b77636fc1add426f/opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582", size = 65947, upload-time = "2025-10-16T08:35:30.23Z" }, +] + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.59b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "prometheus-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/07/39370ec7eacfca10462121a0e036b66ccea3a616bf6ae6ea5fdb72e5009d/opentelemetry_exporter_prometheus-0.59b0.tar.gz", hash = "sha256:d64f23c49abb5a54e271c2fbc8feacea0c394a30ec29876ab5ef7379f08cf3d7", size = 14972, upload-time = "2025-10-16T08:35:55.973Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/05/ea/3005a732002242fd86203989520bdd5a752e1fd30dc225d5d45751ea19fb/opentelemetry_exporter_prometheus-0.59b0-py3-none-any.whl", hash = "sha256:71ced23207abd15b30d1fe4e7e910dcaa7c2ff1f24a6ffccbd4fdded676f541b", size = 13017, upload-time = "2025-10-16T08:35:37.253Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/cb/f0eee1445161faf4c9af3ba7b848cc22a50a3d3e2515051ad8628c35ff80/opentelemetry_sdk-1.38.0.tar.gz", hash = "sha256:93df5d4d871ed09cb4272305be4d996236eedb232253e3ab864c8620f051cebe", size = 171942, upload-time = "2025-10-16T08:36:02.257Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/2e/e93777a95d7d9c40d270a371392b6d6f1ff170c2a3cb32d6176741b5b723/opentelemetry_sdk-1.38.0-py3-none-any.whl", hash = "sha256:1c66af6564ecc1553d72d811a01df063ff097cdc82ce188da9951f93b8d10f6b", size = 132349, upload-time = "2025-10-16T08:35:46.995Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.59b0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/bc/8b9ad3802cd8ac6583a4eb7de7e5d7db004e89cb7efe7008f9c8a537ee75/opentelemetry_semantic_conventions-0.59b0.tar.gz", hash = "sha256:7a6db3f30d70202d5bf9fa4b69bc866ca6a30437287de6c510fb594878aed6b0", size = 129861, upload-time = "2025-10-16T08:36:03.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/7d/c88d7b15ba8fe5c6b8f93be50fc11795e9fc05386c44afaf6b76fe191f9b/opentelemetry_semantic_conventions-0.59b0-py3-none-any.whl", hash = "sha256:35d3b8833ef97d614136e253c1da9342b4c3c083bbaf29ce31d572a1c3825eed", size = 
207954, upload-time = "2025-10-16T08:35:48.054Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1117,6 +1377,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f0/c3/6b40262600bf7daeaa65a9ded17244c2f4b0f6d032119495ec28326a6978/prek-0.2.12-py3-none-win_arm64.whl", hash = "sha256:8f203c05afa4e7126d86d16ad1916b9676cbed127e95dcb230a06299bc5bcf6b", size = 4528164, upload-time = "2025-10-27T12:23:01.219Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + [[package]] name = "prompt-toolkit" version = "3.0.51" @@ -1180,9 +1449,15 @@ disk = [ { name = "diskcache" }, { name = "pathvalidate" }, ] +keyring = [ + { name = "keyring" }, +] memory = [ { name = "cachetools" }, ] +redis = [ + { name = "redis" }, +] [[package]] name = "py-key-value-shared" @@ -1327,6 +1602,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" }, ] +[[package]] +name = "pydocket" +version = "0.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cloudpickle" }, + { name = "exceptiongroup", marker = 
"python_full_version < '3.11'" }, + { name = "fakeredis", extra = ["lua"] }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-prometheus" }, + { name = "prometheus-client" }, + { name = "py-key-value-aio", extra = ["memory", "redis"] }, + { name = "python-json-logger" }, + { name = "redis" }, + { name = "rich" }, + { name = "typer" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/ff/2ba53c6e329ac8dfbbb9780903b391b3dc83ad662e3b82e83f4cc28f40fe/pydocket-0.14.1.tar.gz", hash = "sha256:64a3ea650b81df2ccae12bac80cb8cdf5251b38c2b8a5f1446cde37a17091a1a", size = 261950, upload-time = "2025-11-20T20:43:06.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/ef/b463aada1d9b5bc8c10d5f07d0bdc9f2a7b5e57a959e4690e529086abeaa/pydocket-0.14.1-py3-none-any.whl", hash = "sha256:7ba895605aeed91f14deeafb20720bbe2b662c7e840bde34d2f1c98e041afd1c", size = 57339, upload-time = "2025-11-20T20:43:05.058Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -1569,6 +1867,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] +[[package]] +name = "python-json-logger" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = 
"sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, +] + [[package]] name = "python-multipart" version = "0.0.20" @@ -1600,6 +1907,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, ] +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -1644,6 +1960,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] +[[package]] +name = "redis" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = 
"sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" }, +] + [[package]] name = "referencing" version = "0.36.2" @@ -1859,6 +2187,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = "2025-08-07T19:05:42.866Z" }, ] +[[package]] +name = "secretstorage" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "jeepney" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/46/f5af3402b579fd5e11573ce652019a67074317e18c1935cc0b4ba9b35552/secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137", size = 15554, upload-time = "2025-11-23T19:02:51.545Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = 
"2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -1868,6 +2218,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + [[package]] name = "sse-starlette" version = "3.0.2" @@ -1992,6 +2351,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/c1/f6be8cdd0bf387c1d8ee9d14bb299b7b5d2c0532f550a6693216a32ec0c5/ty-0.0.1a25-py3-none-win_arm64.whl", hash = "sha256:dde2962d448ed87c48736e9a4bb13715a4cced705525e732b1c0dac1d4c66e3d", size = 8536832, upload-time = "2025-10-29T19:40:22.014Z" }, ] +[[package]] +name = "typer" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = 
"shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492, upload-time = "2025-10-20T17:03:49.445Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" @@ -2103,3 +2477,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = 
"2025-06-08T17:06:38.034Z" }, +]