diff --git a/.github/workflows/claude-code-review.yml b/.github/workflows/claude-code-review.yml deleted file mode 100644 index 514f979d7..000000000 --- a/.github/workflows/claude-code-review.yml +++ /dev/null @@ -1,33 +0,0 @@ -# Source: https://github.com/anthropics/claude-code-action/blob/main/docs/code-review.md -name: Claude Code Review - -on: - pull_request: - types: [opened, synchronize, ready_for_review, reopened] - -jobs: - claude-review: - # Fork PRs don't have access to secrets or OIDC tokens, so the action - # cannot authenticate. See https://github.com/anthropics/claude-code-action/issues/339 - if: github.event.pull_request.head.repo.fork == false && github.actor != 'dependabot[bot]' - runs-on: ubuntu-latest - permissions: - contents: read - pull-requests: read - issues: read - id-token: write - - steps: - - name: Checkout repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - fetch-depth: 1 - - - name: Run Claude Code Review - id: claude-review - uses: anthropics/claude-code-action@2f8ba26a219c06cfb0f468eef8d97055fa814f97 # v1.0.53 - with: - anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} - plugin_marketplaces: "https://github.com/anthropics/claude-code.git" - plugins: "code-review@claude-code-plugins" - prompt: "/code-review:code-review ${{ github.repository }}/pull/${{ github.event.pull_request.number }}" diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..307bd81b3 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,140 @@ +# Development Guidelines + +## Branching Model + + + +- `main` is currently the V2 rework. Breaking changes are expected here — when removing or + replacing an API, delete it outright and document the change in + `docs/migration.md`. Do not add `@deprecated` shims or backward-compat layers + on `main`. +- `v1.x` is the release branch for the current stable line. Backport PRs target + this branch and use a `[v1.x]` title prefix. 
+- `README.md` is frozen at v1 (a pre-commit hook rejects edits). Edit + `README.v2.md` instead. + +## Package Management + +- ONLY use uv, NEVER pip +- Installation: `uv add ` +- Running tools: `uv run --frozen `. Always pass `--frozen` so uv doesn't + rewrite `uv.lock` as a side effect. +- Cross-version testing: `uv run --frozen --python 3.10 pytest ...` to run + against a specific interpreter (CI covers 3.10–3.14). +- Upgrading: `uv lock --upgrade-package ` +- FORBIDDEN: `uv pip install`, `@latest` syntax +- Don't raise dependency floors for CVEs alone. The `>=` constraint already + lets users upgrade. Only raise a floor when the SDK needs functionality from + the newer version, and don't add SDK code to work around a dependency's + vulnerability. See Kludex/uvicorn#2643 and python-sdk #1552 for reasoning. + +## Code Quality + +- Type hints required for all code +- Public APIs must have docstrings. When a public API raises exceptions a + caller would reasonably catch, document them in a `Raises:` section. Don't + list exceptions from argument validation or programmer error. +- `src/mcp/__init__.py` defines the public API surface via `__all__`. Adding a + symbol there is a deliberate API decision, not a convenience re-export. +- IMPORTANT: All imports go at the top of the file — inline imports hide + dependencies and obscure circular-import bugs. Only exception: when a + top-level import genuinely can't work (lazy-loading optional deps, or + tests that re-import a module). + +## Testing + +- Framework: `uv run --frozen pytest` +- Async testing: use anyio, not asyncio +- Do not use `Test` prefixed classes — write plain top-level `test_*` functions. + Legacy files still contain `Test*` classes; do NOT follow that pattern for new + tests even when adding to such a file. +- IMPORTANT: Tests should be fast and deterministic. Prefer in-memory async execution; + reach for threads only when necessary, and subprocesses only as a last resort. 
+- For end-to-end behavior, an in-memory `Client(server)` is usually the + cleanest approach (see `tests/client/test_client.py` for the canonical + pattern). For narrower changes, testing the function directly is fine. Use + judgment. +- Test files mirror the source tree: `src/mcp/client/stdio.py` → + `tests/client/test_stdio.py`. Add tests to the existing file for that module. +- Avoid `anyio.sleep()` with a fixed duration to wait for async operations. Instead: + - Use `anyio.Event` — set it in the callback/handler, `await event.wait()` in the test + - For stream messages, use `await stream.receive()` instead of `sleep()` + `receive_nowait()` + - Exception: `sleep()` is appropriate when testing time-based features (e.g., timeouts) +- Wrap indefinite waits (`event.wait()`, `stream.receive()`) in `anyio.fail_after(5)` to prevent hangs +- Pytest is configured with `filterwarnings = ["error"]`, so warnings fail + tests. Don't silence warnings from your own code; fix the underlying cause. + Scoped `ignore::` entries for upstream libraries are acceptable in + `pyproject.toml` with a comment explaining why. + +### Coverage + +CI requires 100% (`fail_under = 100`, `branch = true`). + +- Full check: `./scripts/test` (~23s). Runs coverage + `strict-no-cover` on the + default Python. Not identical to CI: CI runs 3.10–3.14 × {ubuntu, windows} + × {locked, lowest-direct}, and some branch-coverage quirks only surface on + specific matrix entries. +- Targeted check while iterating (~4s, deterministic): + + ```bash + uv run --frozen coverage erase + uv run --frozen coverage run -m pytest tests/path/test_foo.py + uv run --frozen coverage combine + uv run --frozen coverage report --include='src/mcp/path/foo.py' --fail-under=0 + # UV_FROZEN=1 propagates --frozen to the uv subprocess strict-no-cover spawns + UV_FROZEN=1 uv run --frozen strict-no-cover + ``` + + Partial runs can't hit 100% (coverage tracks `tests/` too), so `--fail-under=0` + and `--include` scope the report. 
`strict-no-cover` has no false positives on + partial runs — if your new test executes a line marked `# pragma: no cover`, + even a single-file run catches it. + +Avoid adding new `# pragma: no cover`, `# type: ignore`, or `# noqa` comments. +In tests, use `assert isinstance(x, T)` to narrow types instead of +`# type: ignore`. In library code (`src/`), a `# pragma: no cover` needs very +good reasoning — it usually means a test is missing. Audit before pushing: + +```bash +git diff origin/main... | grep -E '^\+.*(pragma|type: ignore|noqa)' +``` + +What the existing pragmas mean: + +- `# pragma: no cover` — line is never executed. CI's `strict-no-cover` (skipped + on Windows runners) fails if it IS executed. When your test starts covering + such a line, remove the pragma. +- `# pragma: lax no cover` — excluded from coverage but not checked by + `strict-no-cover`. Use for lines covered on some platforms/versions but not + others. +- `# pragma: no branch` — excludes branch arcs only. coverage.py misreports the + `->exit` arc for nested `async with` on Python 3.11+ (worse on 3.14/Windows). + +## Breaking Changes + +When making breaking changes, document them in `docs/migration.md`. Include: + +- What changed +- Why it changed +- How to migrate existing code + +Search for related sections in the migration guide and group related changes together +rather than adding new standalone sections. + +## Formatting & Type Checking + +- Format: `uv run --frozen ruff format .` +- Lint: `uv run --frozen ruff check . 
--fix` +- Type check: `uv run --frozen pyright` +- Pre-commit runs all of the above plus markdownlint, a `uv.lock` consistency + check, and README checks — see `.pre-commit-config.yaml` + +## Exception Handling + +- **Always use `logger.exception()` instead of `logger.error()` when catching exceptions** + - Don't include the exception in the message: `logger.exception("Failed")` not `logger.exception(f"Failed: {e}")` +- **Catch specific exceptions** where possible: + - File ops: `except (OSError, PermissionError):` + - JSON: `except json.JSONDecodeError:` + - Network: `except (ConnectionError, TimeoutError):` +- **FORBIDDEN** `except Exception:` - unless in top-level handlers diff --git a/CLAUDE.md b/CLAUDE.md index 2eee085e1..43c994c2d 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,174 +1 @@ -# Development Guidelines - -This document contains critical information about working with this codebase. Follow these guidelines precisely. - -## Core Development Rules - -1. Package Management - - ONLY use uv, NEVER pip - - Installation: `uv add ` - - Running tools: `uv run ` - - Upgrading: `uv lock --upgrade-package ` - - FORBIDDEN: `uv pip install`, `@latest` syntax - -2. Code Quality - - Type hints required for all code - - Public APIs must have docstrings - - Functions must be focused and small - - Follow existing patterns exactly - - Line length: 120 chars maximum - - FORBIDDEN: imports inside functions. THEY SHOULD BE AT THE TOP OF THE FILE. - -3. Testing Requirements - - Framework: `uv run --frozen pytest` - - Async testing: use anyio, not asyncio - - Do not use `Test` prefixed classes, use functions - - Coverage: test edge cases and errors - - New features require tests - - Bug fixes require regression tests - - IMPORTANT: The `tests/client/test_client.py` is the most well designed test file. Follow its patterns. - - IMPORTANT: Be minimal, and focus on E2E tests: Use the `mcp.client.Client` whenever possible. 
- - Coverage: CI requires 100% (`fail_under = 100`, `branch = true`). - - Full check: `./scripts/test` (~23s). Runs coverage + `strict-no-cover` on the - default Python. Not identical to CI: CI also runs 3.10–3.14 × {ubuntu, windows}, - and some branch-coverage quirks only surface on specific matrix entries. - - Targeted check while iterating (~4s, deterministic): - - ```bash - uv run --frozen coverage erase - uv run --frozen coverage run -m pytest tests/path/test_foo.py - uv run --frozen coverage combine - uv run --frozen coverage report --include='src/mcp/path/foo.py' --fail-under=0 - UV_FROZEN=1 uv run --frozen strict-no-cover - ``` - - Partial runs can't hit 100% (coverage tracks `tests/` too), so `--fail-under=0` - and `--include` scope the report. `strict-no-cover` has no false positives on - partial runs — if your new test executes a line marked `# pragma: no cover`, - even a single-file run catches it. - - Coverage pragmas: - - `# pragma: no cover` — line is never executed. CI's `strict-no-cover` fails if - it IS executed. When your test starts covering such a line, remove the pragma. - - `# pragma: lax no cover` — excluded from coverage but not checked by - `strict-no-cover`. Use for lines covered on some platforms/versions but not - others. - - `# pragma: no branch` — excludes branch arcs only. coverage.py misreports the - `->exit` arc for nested `async with` on Python 3.11+ (worse on 3.14/Windows). - - Avoid `anyio.sleep()` with a fixed duration to wait for async operations. 
Instead: - - Use `anyio.Event` — set it in the callback/handler, `await event.wait()` in the test - - For stream messages, use `await stream.receive()` instead of `sleep()` + `receive_nowait()` - - Exception: `sleep()` is appropriate when testing time-based features (e.g., timeouts) - - Wrap indefinite waits (`event.wait()`, `stream.receive()`) in `anyio.fail_after(5)` to prevent hangs - -Test files mirror the source tree: `src/mcp/client/streamable_http.py` → `tests/client/test_streamable_http.py` -Add tests to the existing file for that module. - -- For commits fixing bugs or adding features based on user reports add: - - ```bash - git commit --trailer "Reported-by:" - ``` - - Where `` is the name of the user. - -- For commits related to a Github issue, add - - ```bash - git commit --trailer "Github-Issue:#" - ``` - -- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never - mention the tool used to create the commit message or PR. - -## Pull Requests - -- Create a detailed message of what changed. Focus on the high level description of - the problem it tries to solve, and how it is solved. Don't go into the specifics of the - code unless it adds clarity. - -- NEVER ever mention a `co-authored-by` or similar aspects. In particular, never - mention the tool used to create the commit message or PR. - -## Breaking Changes - -When making breaking changes, document them in `docs/migration.md`. Include: - -- What changed -- Why it changed -- How to migrate existing code - -Search for related sections in the migration guide and group related changes together -rather than adding new standalone sections. - -## Python Tools - -## Code Formatting - -1. Ruff - - Format: `uv run --frozen ruff format .` - - Check: `uv run --frozen ruff check .` - - Fix: `uv run --frozen ruff check . 
--fix` - - Critical issues: - - Line length (88 chars) - - Import sorting (I001) - - Unused imports - - Line wrapping: - - Strings: use parentheses - - Function calls: multi-line with proper indent - - Imports: try to use a single line - -2. Type Checking - - Tool: `uv run --frozen pyright` - - Requirements: - - Type narrowing for strings - - Version warnings can be ignored if checks pass - -3. Pre-commit - - Config: `.pre-commit-config.yaml` - - Runs: on git commit - - Tools: Prettier (YAML/JSON), Ruff (Python) - - Ruff updates: - - Check PyPI versions - - Update config rev - - Commit config first - -## Error Resolution - -1. CI Failures - - Fix order: - 1. Formatting - 2. Type errors - 3. Linting - - Type errors: - - Get full line context - - Check Optional types - - Add type narrowing - - Verify function signatures - -2. Common Issues - - Line length: - - Break strings with parentheses - - Multi-line function calls - - Split imports - - Types: - - Add None checks - - Narrow string types - - Match existing patterns - -3. 
Best Practices - - Check git status before commits - - Run formatters before type checks - - Keep changes minimal - - Follow existing patterns - - Document public APIs - - Test thoroughly - -## Exception Handling - -- **Always use `logger.exception()` instead of `logger.error()` when catching exceptions** - - Don't include the exception in the message: `logger.exception("Failed")` not `logger.exception(f"Failed: {e}")` -- **Catch specific exceptions** where possible: - - File ops: `except (OSError, PermissionError):` - - JSON: `except json.JSONDecodeError:` - - Network: `except (ConnectionError, TimeoutError):` -- **FORBIDDEN** `except Exception:` - unless in top-level handlers +@AGENTS.md diff --git a/README.v2.md b/README.v2.md index 55d867586..d0851c04e 100644 --- a/README.v2.md +++ b/README.v2.md @@ -681,11 +681,11 @@ The Context object provides the following capabilities: - `ctx.mcp_server` - Access to the MCPServer server instance (see [MCPServer Properties](#mcpserver-properties)) - `ctx.session` - Access to the underlying session for advanced communication (see [Session Properties and Methods](#session-properties-and-methods)) - `ctx.request_context` - Access to request-specific data and lifespan resources (see [Request Context Properties](#request-context-properties)) -- `await ctx.debug(message)` - Send debug log message -- `await ctx.info(message)` - Send info log message -- `await ctx.warning(message)` - Send warning log message -- `await ctx.error(message)` - Send error log message -- `await ctx.log(level, message, logger_name=None)` - Send log with custom level +- `await ctx.debug(data)` - Send debug log message +- `await ctx.info(data)` - Send info log message +- `await ctx.warning(data)` - Send warning log message +- `await ctx.error(data)` - Send error log message +- `await ctx.log(level, data, logger_name=None)` - Send log with custom level - `await ctx.report_progress(progress, total=None, message=None)` - Report operation progress - `await 
ctx.read_resource(uri)` - Read a resource by URI - `await ctx.elicit(message, schema)` - Request additional information from user with validation diff --git a/docs/migration.md b/docs/migration.md index 3b47f9aad..8b70885e8 100644 --- a/docs/migration.md +++ b/docs/migration.md @@ -38,6 +38,7 @@ http_client = httpx.AsyncClient( headers={"Authorization": "Bearer token"}, timeout=httpx.Timeout(30, read=300), auth=my_auth, + follow_redirects=True, ) async with http_client: @@ -48,6 +49,8 @@ async with http_client: ... ``` +v1's internal client set `follow_redirects=True`; set it explicitly when supplying your own `httpx.AsyncClient` to preserve that behavior. + ### `get_session_id` callback removed from `streamable_http_client` The `get_session_id` callback (third element of the returned tuple) has been removed from `streamable_http_client`. The function now returns a 2-tuple `(read_stream, write_stream)` instead of a 3-tuple. @@ -100,6 +103,8 @@ async with http_client: The `headers`, `timeout`, `sse_read_timeout`, and `auth` parameters have been removed from `StreamableHTTPTransport`. Configure these on the `httpx.AsyncClient` instead (see example above). +Note: `sse_client` retains its `headers`, `timeout`, `sse_read_timeout`, and `auth` parameters — only the streamable HTTP transport changed. + ### Removed type aliases and classes The following deprecated type aliases and classes have been removed from `mcp.types`: @@ -126,6 +131,52 @@ from mcp.types import ContentBlock, ResourceTemplateReference # Use `str` instead of `Cursor` for pagination cursors ``` +### Field names changed from camelCase to snake_case + +All Pydantic model fields in `mcp.types` now use snake_case names for Python attribute access. The JSON wire format is unchanged — serialization still uses camelCase via Pydantic aliases. + +**Before (v1):** + +```python +result = await session.call_tool("my_tool", {"x": 1}) +if result.isError: + ... 
+ +tools = await session.list_tools() +cursor = tools.nextCursor +schema = tools.tools[0].inputSchema +``` + +**After (v2):** + +```python +result = await session.call_tool("my_tool", {"x": 1}) +if result.is_error: + ... + +tools = await session.list_tools() +cursor = tools.next_cursor +schema = tools.tools[0].input_schema +``` + +Common renames: + +| v1 (camelCase) | v2 (snake_case) | +|----------------|-----------------| +| `inputSchema` | `input_schema` | +| `outputSchema` | `output_schema` | +| `isError` | `is_error` | +| `nextCursor` | `next_cursor` | +| `mimeType` | `mime_type` | +| `structuredContent` | `structured_content` | +| `serverInfo` | `server_info` | +| `protocolVersion` | `protocol_version` | +| `uriTemplate` | `uri_template` | +| `listChanged` | `list_changed` | +| `progressToken` | `progress_token` | + +Because `populate_by_name=True` is set, the old camelCase names still work as constructor kwargs (e.g., `Tool(inputSchema={...})` is accepted), but attribute access must use snake_case (`tool.input_schema`). + ### `args` parameter removed from `ClientSessionGroup.call_tool()` The deprecated `args` parameter has been removed from `ClientSessionGroup.call_tool()`. Use `arguments` instead. 
@@ -225,6 +276,28 @@ except MCPError as e: from mcp import MCPError ``` +The constructor signature also changed — it now takes `code`, `message`, and optional `data` directly instead of wrapping an `ErrorData`: + +**Before (v1):** + +```python +from mcp.shared.exceptions import McpError +from mcp.types import ErrorData, INVALID_REQUEST + +raise McpError(ErrorData(code=INVALID_REQUEST, message="bad input")) +``` + +**After (v2):** + +```python +from mcp.shared.exceptions import MCPError +from mcp.types import INVALID_REQUEST + +raise MCPError(INVALID_REQUEST, "bad input") +# or, if you already have an ErrorData: +raise MCPError.from_error_data(error_data) +``` + ### `FastMCP` renamed to `MCPServer` The `FastMCP` class has been renamed to `MCPServer` to better reflect its role as the main server class in the SDK. This is a simple rename with no functional changes to the class itself. @@ -240,11 +313,19 @@ mcp = FastMCP("Demo") **After (v2):** ```python -from mcp.server.mcpserver import MCPServer +from mcp.server.mcpserver import MCPServer, Context mcp = MCPServer("Demo") ``` +`Context` is the type annotation for the `ctx` parameter injected into tools, resources, and prompts (see [`get_context()` removed](#mcpserverget_context-removed) below). + +All submodules under `mcp.server.fastmcp.*` are now under `mcp.server.mcpserver.*` with the same structure. Common imports: + +- `Image`, `Audio` — from `mcp.server.mcpserver` (or `.utilities.types`) +- `UserMessage`, `AssistantMessage` — from `mcp.server.mcpserver.prompts.base` +- `ToolError`, `ResourceError` — from `mcp.server.mcpserver.exceptions` + ### `mount_path` parameter removed from MCPServer The `mount_path` parameter has been removed from `MCPServer.__init__()`, `MCPServer.run()`, `MCPServer.run_sse_async()`, and `MCPServer.sse_app()`. It was also removed from the `Settings` class. 
@@ -312,6 +393,8 @@ app = Starlette(routes=[Mount("/", app=mcp.streamable_http_app(json_response=Tru **Note:** DNS rebinding protection is automatically enabled when `host` is `127.0.0.1`, `localhost`, or `::1`. This now happens in `sse_app()` and `streamable_http_app()` instead of the constructor. +If you were mutating these via `mcp.settings` after construction (e.g., `mcp.settings.port = 9000`), pass them to `run()` / `sse_app()` / `streamable_http_app()` instead — these fields no longer exist on `Settings`. The `debug` and `log_level` parameters remain on the constructor. + ### `MCPServer.get_context()` removed `MCPServer.get_context()` has been removed. Context is now injected by the framework and passed explicitly — there is no ambient ContextVar to read from. @@ -331,6 +414,8 @@ async def my_tool(x: int) -> str: **After (v2):** ```python +from mcp.server.mcpserver import Context + @mcp.tool() async def my_tool(x: int, ctx: Context) -> str: await ctx.info("Processing...") @@ -343,6 +428,65 @@ async def my_tool(x: int, ctx: Context) -> str: The internal layers (`ToolManager.call_tool`, `Tool.run`, `Prompt.render`, `ResourceTemplate.create_resource`, etc.) now require `context` as a positional argument. +### Registering lowlevel handlers on `MCPServer` (workaround) + +`MCPServer` does not expose public APIs for `subscribe_resource`, `unsubscribe_resource`, or `set_logging_level` handlers. In v1, the workaround was to reach into the private lowlevel server and use its decorator methods: + +**Before (v1):** + +```python +@mcp._mcp_server.set_logging_level() # pyright: ignore[reportPrivateUsage] +async def handle_set_logging_level(level: str) -> None: + ... 
+ +mcp._mcp_server.subscribe_resource()(handle_subscribe) # pyright: ignore[reportPrivateUsage] +``` + +In v2, the lowlevel `Server` no longer has decorator methods (handlers are constructor-only), so the equivalent workaround is `_add_request_handler`: + +**After (v2):** + +```python +from mcp.server import ServerRequestContext +from mcp.types import EmptyResult, SetLevelRequestParams, SubscribeRequestParams + + +async def handle_set_logging_level(ctx: ServerRequestContext, params: SetLevelRequestParams) -> EmptyResult: + ... + return EmptyResult() + + +async def handle_subscribe(ctx: ServerRequestContext, params: SubscribeRequestParams) -> EmptyResult: + ... + return EmptyResult() + + +mcp._lowlevel_server._add_request_handler("logging/setLevel", handle_set_logging_level) # pyright: ignore[reportPrivateUsage] +mcp._lowlevel_server._add_request_handler("resources/subscribe", handle_subscribe) # pyright: ignore[reportPrivateUsage] +``` + +This is a private API and may change. A public way to register these handlers on `MCPServer` is planned; until then, use this workaround or use the lowlevel `Server` directly. + +### `MCPServer`'s `Context` logging: `message` renamed to `data`, `extra` removed + +On the high-level `Context` object (`mcp.server.mcpserver.Context`), `log()`, `.debug()`, `.info()`, `.warning()`, and `.error()` now take `data: Any` instead of `message: str`, matching the MCP spec's `LoggingMessageNotificationParams.data` field which allows any JSON-serializable value. The `extra` parameter has been removed — pass structured data directly as `data`. + +The lowlevel `ServerSession.send_log_message(data: Any)` already accepted arbitrary data and is unchanged. + +`Context.log()` also now accepts all eight RFC-5424 log levels (`debug`, `info`, `notice`, `warning`, `error`, `critical`, `alert`, `emergency`) via the `LoggingLevel` type, not just the four it previously allowed. 
+ +```python +# Before +await ctx.info("Connection failed", extra={"host": "localhost", "port": 5432}) +await ctx.log(level="info", message="hello") + +# After +await ctx.info({"message": "Connection failed", "host": "localhost", "port": 5432}) +await ctx.log(level="info", data="hello") +``` + +Positional calls (`await ctx.info("hello")`) are unaffected. + ### Replace `RootModel` by union types with `TypeAdapter` validation The following union types are no longer `RootModel` subclasses: @@ -383,6 +527,22 @@ notification = server_notification_adapter.validate_python(data) # No .root access needed - notification is the actual type ``` +The same applies when constructing values — the wrapper call is no longer needed: + +**Before (v1):** + +```python +await session.send_notification(ClientNotification(InitializedNotification())) +await session.send_request(ClientRequest(PingRequest()), EmptyResult) +``` + +**After (v2):** + +```python +await session.send_notification(InitializedNotification()) +await session.send_request(PingRequest(), EmptyResult) +``` + **Available adapters:** | Union Type | Adapter | @@ -428,6 +588,8 @@ server = Server("my-server", on_call_tool=handle_call_tool) ### `RequestContext` type parameters simplified +The `mcp.shared.context` module has been removed. `RequestContext` is now split into `ClientRequestContext` (in `mcp.client.context`) and `ServerRequestContext` (in `mcp.server.context`). + The `RequestContext` class has been split to separate shared fields from server-specific fields. The shared `RequestContext` now only takes 1 type parameter (the session type) instead of 3. **`RequestContext` changes:** @@ -458,11 +620,27 @@ ctx: ClientRequestContext server_ctx: ServerRequestContext[LifespanContextT, RequestT] ``` +The high-level `Context` class (injected into `@mcp.tool()` etc.) similarly dropped its `ServerSessionT` parameter: `Context[ServerSessionT, LifespanContextT, RequestT]` → `Context[LifespanContextT, RequestT]`. 
Both remaining parameters have defaults, so bare `Context` is usually sufficient: + +**Before (v1):** + +```python +async def my_tool(ctx: Context[ServerSession, None]) -> str: ... +``` + +**After (v2):** + +```python +async def my_tool(ctx: Context) -> str: ... +# or, with an explicit lifespan type: +async def my_tool(ctx: Context[MyLifespanState]) -> str: ... +``` + ### `ProgressContext` and `progress()` context manager removed The `mcp.shared.progress` module (`ProgressContext`, `Progress`, and the `progress()` context manager) has been removed. This module had no real-world adoption — all users send progress notifications via `Context.report_progress()` or `session.send_progress_notification()` directly. -**Before:** +**Before (v1):** ```python from mcp.shared.progress import progress @@ -490,6 +668,46 @@ await session.send_progress_notification( ) ``` +### `create_connected_server_and_client_session` removed + +The `create_connected_server_and_client_session` helper in `mcp.shared.memory` has been removed. Use `mcp.client.Client` instead — it accepts a `Server` or `MCPServer` instance directly and handles the in-memory transport and session setup for you. + +**Before (v1):** + +```python +from mcp.shared.memory import create_connected_server_and_client_session + +async with create_connected_server_and_client_session(server) as session: + result = await session.call_tool("my_tool", {"x": 1}) +``` + +**After (v2):** + +```python +from mcp.client import Client + +async with Client(server) as client: + result = await client.call_tool("my_tool", {"x": 1}) +``` + +`Client` accepts the same callback parameters the old helper did (`sampling_callback`, `list_roots_callback`, `logging_callback`, `message_handler`, `elicitation_callback`, `client_info`) plus `raise_exceptions` to surface server-side errors. 
+ +If you need direct access to the underlying `ClientSession` and memory streams (e.g., for low-level transport testing), `create_client_server_memory_streams` is still available in `mcp.shared.memory`: + +```python +import anyio +from mcp.client.session import ClientSession +from mcp.shared.memory import create_client_server_memory_streams + +async with create_client_server_memory_streams() as (client_streams, server_streams): + async with anyio.create_task_group() as tg: + tg.start_soon(lambda: server.run(*server_streams, server.create_initialization_options())) + async with ClientSession(*client_streams) as session: + await session.initialize() + ... + tg.cancel_scope.cancel() +``` + ### Resource URI type changed from `AnyUrl` to `str` The `uri` field on resource-related types now uses `str` instead of Pydantic's `AnyUrl`. This aligns with the [MCP specification schema](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/main/schema/draft/schema.ts) which defines URIs as plain strings (`uri: string`) without strict URL validation. This change allows relative paths like `users/me` that were previously rejected. @@ -593,6 +811,8 @@ if ListToolsRequest in server.request_handlers: server = Server("my-server", on_list_tools=handle_list_tools) ``` +If you need to check whether a handler is registered, track this yourself — there is currently no public introspection API. + ### Lowlevel `Server`: decorator-based handlers replaced with constructor `on_*` params The lowlevel `Server` class no longer uses decorator methods for handler registration. Instead, handlers are passed as `on_*` keyword arguments to the constructor. @@ -645,6 +865,29 @@ server = Server("my-server", on_list_tools=handle_list_tools, on_call_tool=handl - Handlers return the full result type (e.g. `ListToolsResult`) rather than unwrapped values (e.g. `list[Tool]`). - The automatic `jsonschema` input/output validation that the old `call_tool()` decorator performed has been removed. 
There is no built-in replacement — if you relied on schema validation in the lowlevel server, you will need to validate inputs yourself in your handler. +**Complete handler reference:** + +All handlers receive `ctx: ServerRequestContext` as the first argument. The second argument and return type are: + +| v1 decorator | v2 constructor kwarg | `params` type | return type | +|---|---|---|---| +| `@server.list_tools()` | `on_list_tools` | `PaginatedRequestParams \| None` | `ListToolsResult` | +| `@server.call_tool()` | `on_call_tool` | `CallToolRequestParams` | `CallToolResult \| CreateTaskResult` | +| `@server.list_resources()` | `on_list_resources` | `PaginatedRequestParams \| None` | `ListResourcesResult` | +| `@server.list_resource_templates()` | `on_list_resource_templates` | `PaginatedRequestParams \| None` | `ListResourceTemplatesResult` | +| `@server.read_resource()` | `on_read_resource` | `ReadResourceRequestParams` | `ReadResourceResult` | +| `@server.subscribe_resource()` | `on_subscribe_resource` | `SubscribeRequestParams` | `EmptyResult` | +| `@server.unsubscribe_resource()` | `on_unsubscribe_resource` | `UnsubscribeRequestParams` | `EmptyResult` | +| `@server.list_prompts()` | `on_list_prompts` | `PaginatedRequestParams \| None` | `ListPromptsResult` | +| `@server.get_prompt()` | `on_get_prompt` | `GetPromptRequestParams` | `GetPromptResult` | +| `@server.completion()` | `on_completion` | `CompleteRequestParams` | `CompleteResult` | +| `@server.set_logging_level()` | `on_set_logging_level` | `SetLevelRequestParams` | `EmptyResult` | +| — | `on_ping` | `RequestParams \| None` | `EmptyResult` | +| `@server.progress_notification()` | `on_progress` | `ProgressNotificationParams` | `None` | +| — | `on_roots_list_changed` | `NotificationParams \| None` | `None` | + +All `params` and return types are importable from `mcp.types`. 
+ **Notification handlers:** ```python @@ -694,10 +937,17 @@ Note: `params.arguments` can be `None` (the old decorator defaulted it to `{}`). **`read_resource()` — content type wrapping removed:** -The old decorator auto-wrapped `str` into `TextResourceContents` and `bytes` into `BlobResourceContents` (with base64 encoding), and applied a default mime type of `text/plain`: +The old decorator auto-wrapped `Iterable[ReadResourceContents]` (and the deprecated `str`/`bytes` shorthand) into `TextResourceContents`/`BlobResourceContents`, handling base64 encoding and mime-type defaulting: ```python -# Before (v1) — str/bytes auto-wrapped with mime type defaulting +# Before (v1) — Iterable[ReadResourceContents] auto-wrapped +from mcp.server.lowlevel.helper_types import ReadResourceContents + +@server.read_resource() +async def handle(uri: AnyUrl) -> Iterable[ReadResourceContents]: + return [ReadResourceContents(content="file contents", mime_type="text/plain")] + +# Before (v1) — str/bytes shorthand (already deprecated in v1) @server.read_resource() async def handle(uri: str) -> str: return "file contents" @@ -849,7 +1099,7 @@ params = CallToolRequestParams( params = CallToolRequestParams( name="my_tool", arguments={}, - _meta={"progressToken": "tok", "customField": "value"}, # OK + _meta={"my_custom_key": "value", "another": 123}, # OK ) ``` diff --git a/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py b/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py index 5fac56be5..6ef2f0b11 100644 --- a/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py +++ b/examples/clients/simple-auth-client/mcp_simple_auth_client/main.py @@ -18,7 +18,7 @@ from urllib.parse import parse_qs, urlparse import httpx -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from mcp.client._transport import ReadStream, WriteStream from mcp.client.auth import OAuthClientProvider, TokenStorage from mcp.client.session import 
ClientSession from mcp.client.sse import sse_client @@ -241,8 +241,8 @@ async def _default_redirect_handler(authorization_url: str) -> None: async def _run_session( self, - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], ): """Run the MCP session with the given streams.""" print("🤝 Initializing MCP session...") diff --git a/pyproject.toml b/pyproject.toml index 624ade170..a5d2c3d80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ ] dependencies = [ "anyio>=4.9", - "httpx>=0.27.1", + "httpx>=0.27.1,<1.0.0", "httpx-sse>=0.4", "pydantic>=2.12.0", "starlette>=0.48.0; python_version >= '3.14'", @@ -40,6 +40,7 @@ dependencies = [ "pyjwt[crypto]>=2.10.1", "typing-extensions>=4.13.0", "typing-inspection>=0.4.1", + "opentelemetry-api>=1.28.0", ] [project.optional-dependencies] @@ -71,6 +72,7 @@ dev = [ "coverage[toml]>=7.10.7,<=7.13", "pillow>=12.0", "strict-no-cover", + "logfire>=3.0.0", ] docs = [ "mkdocs>=1.6.1", @@ -183,7 +185,6 @@ filterwarnings = [ # This should be fixed on Uvicorn's side. 
"ignore::DeprecationWarning:websockets", "ignore:websockets.server.WebSocketServerProtocol is deprecated:DeprecationWarning", - "ignore:Returning str or bytes.*:DeprecationWarning:mcp.server.lowlevel", # pywin32 internal deprecation warning "ignore:getargs.*The 'u' format is deprecated:DeprecationWarning", ] @@ -219,13 +220,10 @@ skip_covered = true show_missing = true ignore_errors = true precision = 2 -exclude_lines = [ - "pragma: no cover", +exclude_also = [ "pragma: lax no cover", - "if TYPE_CHECKING:", "@overload", "raise NotImplementedError", - "^\\s*\\.\\.\\.\\s*$", ] # https://coverage.readthedocs.io/en/latest/config.html#paths diff --git a/src/mcp/client/__main__.py b/src/mcp/client/__main__.py index f3db17906..b9ec34422 100644 --- a/src/mcp/client/__main__.py +++ b/src/mcp/client/__main__.py @@ -6,9 +6,9 @@ from urllib.parse import urlparse import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from mcp import types +from mcp.client._transport import ReadStream, WriteStream from mcp.client.session import ClientSession from mcp.client.sse import sse_client from mcp.client.stdio import StdioServerParameters, stdio_client @@ -33,8 +33,8 @@ async def message_handler( async def run_session( - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], client_info: types.Implementation | None = None, ): async with ClientSession( diff --git a/src/mcp/client/_transport.py b/src/mcp/client/_transport.py index a86362900..0163fef95 100644 --- a/src/mcp/client/_transport.py +++ b/src/mcp/client/_transport.py @@ -5,11 +5,12 @@ from contextlib import AbstractAsyncContextManager from typing import Protocol -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - +from mcp.shared._stream_protocols import ReadStream, WriteStream from 
mcp.shared.message import SessionMessage -TransportStreams = tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]] +__all__ = ["ReadStream", "WriteStream", "Transport", "TransportStreams"] + +TransportStreams = tuple[ReadStream[SessionMessage | Exception], WriteStream[SessionMessage]] class Transport(AbstractAsyncContextManager[TransportStreams], Protocol): diff --git a/src/mcp/client/auth/oauth2.py b/src/mcp/client/auth/oauth2.py index 25075dec3..72309f577 100644 --- a/src/mcp/client/auth/oauth2.py +++ b/src/mcp/client/auth/oauth2.py @@ -320,7 +320,7 @@ async def _perform_authorization_code_grant(self) -> tuple[str, str]: raise OAuthFlowError("No callback handler provided for authorization code grant") # pragma: no cover if self.context.oauth_metadata and self.context.oauth_metadata.authorization_endpoint: - auth_endpoint = str(self.context.oauth_metadata.authorization_endpoint) # pragma: no cover + auth_endpoint = str(self.context.oauth_metadata.authorization_endpoint) else: auth_base_url = self.context.get_authorization_base_url(self.context.server_url) auth_endpoint = urljoin(auth_base_url, "/authorize") @@ -343,11 +343,16 @@ async def _perform_authorization_code_grant(self) -> tuple[str, str]: # Only include resource param if conditions are met if self.context.should_include_resource_param(self.context.protocol_version): - auth_params["resource"] = self.context.get_resource_url() # RFC 8707 # pragma: no cover + auth_params["resource"] = self.context.get_resource_url() # RFC 8707 if self.context.client_metadata.scope: # pragma: no branch auth_params["scope"] = self.context.client_metadata.scope + # OIDC requires prompt=consent when offline_access is requested + # https://openid.net/specs/openid-connect-core-1_0.html#OfflineAccess + if "offline_access" in self.context.client_metadata.scope.split(): + auth_params["prompt"] = "consent" + authorization_url = f"{auth_endpoint}?{urlencode(auth_params)}" await 
self.context.redirect_handler(authorization_url) @@ -576,6 +581,7 @@ async def async_auth_flow(self, request: httpx.Request) -> AsyncGenerator[httpx. extract_scope_from_www_auth(response), self.context.protected_resource_metadata, self.context.oauth_metadata, + self.context.client_metadata.grant_types, ) # Step 4: Register client or use URL-based client ID (CIMD) @@ -622,7 +628,10 @@ async def async_auth_flow(self, request: httpx.Request) -> AsyncGenerator[httpx. try: # Step 2a: Update the required scopes self.context.client_metadata.scope = get_client_metadata_scopes( - extract_scope_from_www_auth(response), self.context.protected_resource_metadata + extract_scope_from_www_auth(response), + self.context.protected_resource_metadata, + self.context.oauth_metadata, + self.context.client_metadata.grant_types, ) # Step 2b: Perform (re-)authorization and token exchange diff --git a/src/mcp/client/auth/utils.py b/src/mcp/client/auth/utils.py index 0ca36b98d..d75324f2f 100644 --- a/src/mcp/client/auth/utils.py +++ b/src/mcp/client/auth/utils.py @@ -99,24 +99,36 @@ def get_client_metadata_scopes( www_authenticate_scope: str | None, protected_resource_metadata: ProtectedResourceMetadata | None, authorization_server_metadata: OAuthMetadata | None = None, + client_grant_types: list[str] | None = None, ) -> str | None: - """Select scopes as outlined in the 'Scope Selection Strategy' in the MCP spec.""" - # Per MCP spec, scope selection priority order: - # 1. Use scope from WWW-Authenticate header (if provided) - # 2. Use all scopes from PRM scopes_supported (if available) - # 3. Omit scope parameter if neither is available - + """Select effective scopes and augment for refresh token support.""" + selected_scope: str | None = None + + # MCP spec scope selection priority: + # 1. WWW-Authenticate header scope + # 2. PRM scopes_supported + # 3. AS scopes_supported (SDK fallback) + # 4. 
Omit scope parameter if www_authenticate_scope is not None: - # Priority 1: WWW-Authenticate header scope - return www_authenticate_scope + selected_scope = www_authenticate_scope elif protected_resource_metadata is not None and protected_resource_metadata.scopes_supported is not None: - # Priority 2: PRM scopes_supported - return " ".join(protected_resource_metadata.scopes_supported) + selected_scope = " ".join(protected_resource_metadata.scopes_supported) elif authorization_server_metadata is not None and authorization_server_metadata.scopes_supported is not None: - return " ".join(authorization_server_metadata.scopes_supported) # pragma: no cover - else: - # Priority 3: Omit scope parameter - return None + selected_scope = " ".join(authorization_server_metadata.scopes_supported) + + # SEP-2207: append offline_access when the AS supports it and the client can use refresh tokens + if ( + selected_scope is not None + and authorization_server_metadata is not None + and authorization_server_metadata.scopes_supported is not None + and "offline_access" in authorization_server_metadata.scopes_supported + and client_grant_types is not None + and "refresh_token" in client_grant_types + and "offline_access" not in selected_scope.split() + ): + selected_scope = f"{selected_scope} offline_access" + + return selected_scope def build_oauth_authorization_server_metadata_discovery_urls(auth_server_url: str | None, server_url: str) -> list[str]: diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 7c964a334..0cea454a7 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -4,10 +4,10 @@ from typing import Any, Protocol import anyio.lowlevel -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import TypeAdapter from mcp import types +from mcp.client._transport import ReadStream, WriteStream from mcp.client.experimental import ExperimentalClientFeatures from mcp.client.experimental.task_handlers 
import ExperimentalTaskHandlers from mcp.shared._context import RequestContext @@ -109,8 +109,8 @@ class ClientSession( ): def __init__( self, - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], read_timeout_seconds: float | None = None, sampling_callback: SamplingFnT | None = None, elicitation_callback: ElicitationFnT | None = None, diff --git a/src/mcp/client/sse.py b/src/mcp/client/sse.py index 7b66b5c1b..193204a15 100644 --- a/src/mcp/client/sse.py +++ b/src/mcp/client/sse.py @@ -7,11 +7,11 @@ import anyio import httpx from anyio.abc import TaskStatus -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from httpx_sse import aconnect_sse from httpx_sse._exceptions import SSEError from mcp import types +from mcp.shared._context_streams import create_context_streams from mcp.shared._httpx_utils import McpHttpClientFactory, create_mcp_http_client from mcp.shared.message import SessionMessage @@ -51,12 +51,6 @@ async def sse_client( auth: Optional HTTPX authentication handler. on_session_created: Optional callback invoked with the session ID when received. 
""" - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - logger.debug(f"Connecting to SSE endpoint: {remove_request_params(url)}") async with httpx_client_factory( headers=headers, auth=auth, timeout=httpx.Timeout(timeout, read=sse_read_timeout) @@ -65,8 +59,8 @@ async def sse_client( event_source.response.raise_for_status() logger.debug("SSE connection established") - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) async def sse_reader(task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED): try: @@ -124,7 +118,8 @@ async def sse_reader(task_status: TaskStatus[str] = anyio.TASK_STATUS_IGNORED): async def post_writer(endpoint_url: str): try: async with write_stream_reader, write_stream: - async for session_message in write_stream_reader: + + async def _send_message(session_message: SessionMessage) -> None: logger.debug(f"Sending client message: {session_message}") response = await client.post( endpoint_url, @@ -136,6 +131,14 @@ async def post_writer(endpoint_url: str): ) response.raise_for_status() logger.debug(f"Client message sent successfully: {response.status_code}") + + async for session_message in write_stream_reader: + sender_ctx = write_stream_reader.last_context + if sender_ctx is not None: + async with anyio.create_task_group() as tg: + sender_ctx.run(tg.start_soon, _send_message, session_message) + else: + await _send_message(session_message) # pragma: no cover except Exception: # pragma: lax no cover logger.exception("Error in post_writer") diff --git 
a/src/mcp/client/streamable_http.py b/src/mcp/client/streamable_http.py index 3afb94b03..9a119c633 100644 --- a/src/mcp/client/streamable_http.py +++ b/src/mcp/client/streamable_http.py @@ -11,11 +11,11 @@ import anyio import httpx from anyio.abc import TaskGroup -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from httpx_sse import EventSource, ServerSentEvent, aconnect_sse from pydantic import ValidationError from mcp.client._transport import TransportStreams +from mcp.shared._context_streams import ContextReceiveStream, ContextSendStream, create_context_streams from mcp.shared._httpx_utils import create_mcp_http_client from mcp.shared.message import ClientMessageMetadata, SessionMessage from mcp.types import ( @@ -38,8 +38,8 @@ # TODO(Marcelo): Put the TransportStreams in a module under shared, so we can import here. SessionMessageOrError = SessionMessage | Exception -StreamWriter = MemoryObjectSendStream[SessionMessageOrError] -StreamReader = MemoryObjectReceiveStream[SessionMessage] +StreamWriter = ContextSendStream[SessionMessageOrError] +StreamReader = ContextReceiveStream[SessionMessage] MCP_SESSION_ID = "mcp-session-id" MCP_PROTOCOL_VERSION = "mcp-protocol-version" @@ -434,14 +434,15 @@ async def post_writer( client: httpx.AsyncClient, write_stream_reader: StreamReader, read_stream_writer: StreamWriter, - write_stream: MemoryObjectSendStream[SessionMessage], + write_stream: ContextSendStream[SessionMessage], start_get_stream: Callable[[], None], tg: TaskGroup, ) -> None: """Handle writing requests to the server.""" try: async with write_stream_reader, read_stream_writer, write_stream: - async for session_message in write_stream_reader: + + async def _handle_message(session_message: SessionMessage) -> None: message = session_message.message metadata = ( session_message.metadata @@ -478,6 +479,14 @@ async def handle_request_async(): else: await handle_request_async() + async for session_message in write_stream_reader: + 
sender_ctx = write_stream_reader.last_context + if sender_ctx is not None: + async with anyio.create_task_group() as tg_local: + sender_ctx.run(tg_local.start_soon, _handle_message, session_message) + else: + await _handle_message(session_message) # pragma: no cover + except Exception: # pragma: lax no cover logger.exception("Error in post_writer") @@ -547,8 +556,8 @@ async def streamable_http_client( if not client_provided: await stack.enter_async_context(client) - read_stream_writer, read_stream = anyio.create_memory_object_stream[SessionMessage | Exception](0) - write_stream, write_stream_reader = anyio.create_memory_object_stream[SessionMessage](0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) async with ( read_stream_writer, diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index c28842272..59de0ace4 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -36,15 +36,16 @@ async def main(): from __future__ import annotations +import contextvars import logging import warnings from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager from importlib.metadata import version as importlib_version -from typing import Any, Generic +from typing import Any, Generic, cast import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from opentelemetry.trace import SpanKind, StatusCode from starlette.applications import Starlette from starlette.middleware import Middleware from starlette.middleware.authentication import AuthenticationMiddleware @@ -65,6 +66,8 @@ async def main(): from mcp.server.streamable_http import EventStore from mcp.server.streamable_http_manager import StreamableHTTPASGIApp, StreamableHTTPSessionManager from 
mcp.server.transport_security import TransportSecuritySettings +from mcp.shared._otel import extract_trace_context, otel_span +from mcp.shared._stream_protocols import ReadStream, WriteStream from mcp.shared.exceptions import MCPError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder @@ -355,8 +358,8 @@ def session_manager(self) -> StreamableHTTPSessionManager: async def run( self, - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], initialization_options: InitializationOptions, # When False, exceptions are returned as messages to the client. # When True, exceptions are raised, which will cause the server to shut down @@ -391,7 +394,13 @@ async def run( async for message in session.incoming_messages: logger.debug("Received message: %s", message) - tg.start_soon( + if isinstance(message, RequestResponder) and message.context is not None: + context = message.context + else: + context = contextvars.copy_context() + + context.run( + tg.start_soon, self._handle_message, message, session, @@ -439,72 +448,90 @@ async def _handle_request( ): logger.info("Processing request of type %s", type(req).__name__) - if handler := self._request_handlers.get(req.method): - logger.debug("Dispatching request of type %s", type(req).__name__) + target = getattr(req.params, "name", None) if req.params else None + span_name = f"MCP handle {req.method} {target}" if target else f"MCP handle {req.method}" - try: - # Extract request context and close_sse_stream from message metadata - request_data = None - close_sse_stream_cb = None - close_standalone_sse_stream_cb = None - if message.message_metadata is not None and isinstance(message.message_metadata, ServerMessageMetadata): - request_data = message.message_metadata.request_context - close_sse_stream_cb = 
message.message_metadata.close_sse_stream - close_standalone_sse_stream_cb = message.message_metadata.close_standalone_sse_stream + # Extract W3C trace context from _meta (SEP-414). + meta = cast(dict[str, Any] | None, getattr(req.params, "meta", None)) if req.params else None + parent_context = extract_trace_context(meta) if meta is not None else None - client_capabilities = session.client_params.capabilities if session.client_params else None - task_support = self._experimental_handlers.task_support if self._experimental_handlers else None - # Get task metadata from request params if present - task_metadata = None - if hasattr(req, "params") and req.params is not None: - task_metadata = getattr(req.params, "task", None) - ctx = ServerRequestContext( - request_id=message.request_id, - meta=message.request_meta, - session=session, - lifespan_context=lifespan_context, - experimental=Experimental( - task_metadata=task_metadata, - _client_capabilities=client_capabilities, - _session=session, - _task_support=task_support, - ), - request=request_data, - close_sse_stream=close_sse_stream_cb, - close_standalone_sse_stream=close_standalone_sse_stream_cb, - ) - response = await handler(ctx, req.params) - except MCPError as err: - response = err.error - except anyio.get_cancelled_exc_class(): - if message.cancelled: - # Client sent CancelledNotification; responder.cancel() already - # sent an error response, so skip the duplicate. - logger.info("Request %s cancelled - duplicate response suppressed", message.request_id) - return - # Transport-close cancellation from the TG in run(); re-raise so the - # TG swallows its own cancellation. 
- raise - except Exception as err: - if raise_exceptions: # pragma: no cover - raise err - response = types.ErrorData(code=0, message=str(err)) - else: # pragma: no cover - response = types.ErrorData(code=types.METHOD_NOT_FOUND, message="Method not found") - - try: - await message.respond(response) - except (anyio.BrokenResourceError, anyio.ClosedResourceError): - # Transport closed between handler unblocking and respond. Happens - # when _receive_loop's finally wakes a handler blocked on - # send_request: the handler runs to respond() before run()'s TG - # cancel fires, but after the write stream closed. Closed if our - # end closed (_receive_loop's async-with exit); Broken if the peer - # end closed first (streamable_http terminate()). - logger.debug("Response for %s dropped - transport closed", message.request_id) - return - - logger.debug("Response sent") + with otel_span( + span_name, + kind=SpanKind.SERVER, + attributes={"mcp.method.name": req.method, "jsonrpc.request.id": message.request_id}, + context=parent_context, + ) as span: + if handler := self._request_handlers.get(req.method): + logger.debug("Dispatching request of type %s", type(req).__name__) + + try: + # Extract request context and close_sse_stream from message metadata + request_data = None + close_sse_stream_cb = None + close_standalone_sse_stream_cb = None + if message.message_metadata is not None and isinstance( + message.message_metadata, ServerMessageMetadata + ): + request_data = message.message_metadata.request_context + close_sse_stream_cb = message.message_metadata.close_sse_stream + close_standalone_sse_stream_cb = message.message_metadata.close_standalone_sse_stream + + client_capabilities = session.client_params.capabilities if session.client_params else None + task_support = self._experimental_handlers.task_support if self._experimental_handlers else None + # Get task metadata from request params if present + task_metadata = None + if hasattr(req, "params") and req.params is not 
None: # pragma: no branch + task_metadata = getattr(req.params, "task", None) + ctx = ServerRequestContext( + request_id=message.request_id, + meta=message.request_meta, + session=session, + lifespan_context=lifespan_context, + experimental=Experimental( + task_metadata=task_metadata, + _client_capabilities=client_capabilities, + _session=session, + _task_support=task_support, + ), + request=request_data, + close_sse_stream=close_sse_stream_cb, + close_standalone_sse_stream=close_standalone_sse_stream_cb, + ) + response = await handler(ctx, req.params) + except MCPError as err: + response = err.error + except anyio.get_cancelled_exc_class(): + if message.cancelled: + # Client sent CancelledNotification; responder.cancel() already + # sent an error response, so skip the duplicate. + logger.info("Request %s cancelled - duplicate response suppressed", message.request_id) + return + # Transport-close cancellation from the TG in run(); re-raise so the + # TG swallows its own cancellation. + raise + except Exception as err: + if raise_exceptions: # pragma: no cover + raise err + response = types.ErrorData(code=0, message=str(err)) + else: # pragma: no cover + response = types.ErrorData(code=types.METHOD_NOT_FOUND, message="Method not found") + + if isinstance(response, types.ErrorData) and span is not None: + span.set_status(StatusCode.ERROR, response.message) + + try: + await message.respond(response) + except (anyio.BrokenResourceError, anyio.ClosedResourceError): + # Transport closed between handler unblocking and respond. Happens + # when _receive_loop's finally wakes a handler blocked on + # send_request: the handler runs to respond() before run()'s TG + # cancel fires, but after the write stream closed. Closed if our + # end closed (_receive_loop's async-with exit); Broken if the peer + # end closed first (streamable_http terminate()). 
+ logger.debug("Response for %s dropped - transport closed", message.request_id) + return + + logger.debug("Response sent") async def _handle_notification( self, diff --git a/src/mcp/server/mcpserver/context.py b/src/mcp/server/mcpserver/context.py index 1538adc7c..e87388eee 100644 --- a/src/mcp/server/mcpserver/context.py +++ b/src/mcp/server/mcpserver/context.py @@ -1,7 +1,7 @@ from __future__ import annotations from collections.abc import Iterable -from typing import TYPE_CHECKING, Any, Generic, Literal +from typing import TYPE_CHECKING, Any, Generic from pydantic import AnyUrl, BaseModel @@ -14,6 +14,7 @@ elicit_with_validation, ) from mcp.server.lowlevel.helper_types import ReadResourceContents +from mcp.types import LoggingLevel if TYPE_CHECKING: from mcp.server.mcpserver.server import MCPServer @@ -186,29 +187,23 @@ async def elicit_url( async def log( self, - level: Literal["debug", "info", "warning", "error"], - message: str, + level: LoggingLevel, + data: Any, *, logger_name: str | None = None, - extra: dict[str, Any] | None = None, ) -> None: """Send a log message to the client. Args: - level: Log level (debug, info, warning, error) - message: Log message + level: Log level (debug, info, notice, warning, error, critical, + alert, emergency) + data: The data to be logged. Any JSON serializable type is allowed + (string, dict, list, number, bool, etc.) per the MCP specification. 
logger_name: Optional logger name - extra: Optional dictionary with additional structured data to include """ - - if extra: - log_data = {"message": message, **extra} - else: - log_data = message - await self.request_context.session.send_log_message( level=level, - data=log_data, + data=data, logger=logger_name, related_request_id=self.request_id, ) @@ -261,20 +256,18 @@ async def close_standalone_sse_stream(self) -> None: await self._request_context.close_standalone_sse_stream() # Convenience methods for common log levels - async def debug(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: + async def debug(self, data: Any, *, logger_name: str | None = None) -> None: """Send a debug log message.""" - await self.log("debug", message, logger_name=logger_name, extra=extra) + await self.log("debug", data, logger_name=logger_name) - async def info(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: + async def info(self, data: Any, *, logger_name: str | None = None) -> None: """Send an info log message.""" - await self.log("info", message, logger_name=logger_name, extra=extra) + await self.log("info", data, logger_name=logger_name) - async def warning( - self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None - ) -> None: + async def warning(self, data: Any, *, logger_name: str | None = None) -> None: """Send a warning log message.""" - await self.log("warning", message, logger_name=logger_name, extra=extra) + await self.log("warning", data, logger_name=logger_name) - async def error(self, message: str, *, logger_name: str | None = None, extra: dict[str, Any] | None = None) -> None: + async def error(self, data: Any, *, logger_name: str | None = None) -> None: """Send an error log message.""" - await self.log("error", message, logger_name=logger_name, extra=extra) + await self.log("error", data, logger_name=logger_name) diff --git 
a/src/mcp/server/mcpserver/prompts/base.py b/src/mcp/server/mcpserver/prompts/base.py index 0c319d53c..e5b2af7d8 100644 --- a/src/mcp/server/mcpserver/prompts/base.py +++ b/src/mcp/server/mcpserver/prompts/base.py @@ -2,15 +2,17 @@ from __future__ import annotations -import inspect +import functools from collections.abc import Awaitable, Callable, Sequence from typing import TYPE_CHECKING, Any, Literal +import anyio.to_thread import pydantic_core from pydantic import BaseModel, Field, TypeAdapter, validate_call from mcp.server.mcpserver.utilities.context_injection import find_context_parameter, inject_context from mcp.server.mcpserver.utilities.func_metadata import func_metadata +from mcp.shared._callable_inspection import is_async_callable from mcp.types import ContentBlock, Icon, TextContent if TYPE_CHECKING: @@ -155,10 +157,11 @@ async def render( # Add context to arguments if needed call_args = inject_context(self.fn, arguments or {}, context, self.context_kwarg) - # Call function and check if result is a coroutine - result = self.fn(**call_args) - if inspect.iscoroutine(result): - result = await result + fn = self.fn + if is_async_callable(fn): + result = await fn(**call_args) + else: + result = await anyio.to_thread.run_sync(functools.partial(self.fn, **call_args)) # Validate messages if not isinstance(result, list | tuple): diff --git a/src/mcp/server/mcpserver/resources/resource_manager.py b/src/mcp/server/mcpserver/resources/resource_manager.py index 6bf17376d..766cf51ae 100644 --- a/src/mcp/server/mcpserver/resources/resource_manager.py +++ b/src/mcp/server/mcpserver/resources/resource_manager.py @@ -22,28 +22,26 @@ class ResourceManager: """Manages MCPServer resources.""" - def __init__(self, warn_on_duplicate_resources: bool = True): + def __init__(self, warn_on_duplicate_resources: bool = True, *, resources: list[Resource] | None = None): self._resources: dict[str, Resource] = {} self._templates: dict[str, ResourceTemplate] = {} 
self.warn_on_duplicate_resources = warn_on_duplicate_resources + for resource in resources or (): + self.add_resource(resource) + def add_resource(self, resource: Resource) -> Resource: """Add a resource to the manager. Args: - resource: A Resource instance to add + resource: A Resource instance to add. Returns: - The added resource. If a resource with the same URI already exists, - returns the existing resource. + The added resource. If a resource with the same URI already exists, returns the existing resource. """ logger.debug( "Adding resource", - extra={ - "uri": resource.uri, - "type": type(resource).__name__, - "resource_name": resource.name, - }, + extra={"uri": resource.uri, "type": type(resource).__name__, "resource_name": resource.name}, ) existing = self._resources.get(str(resource.uri)) if existing: diff --git a/src/mcp/server/mcpserver/resources/templates.py b/src/mcp/server/mcpserver/resources/templates.py index 2d612657c..f1ee29a37 100644 --- a/src/mcp/server/mcpserver/resources/templates.py +++ b/src/mcp/server/mcpserver/resources/templates.py @@ -2,17 +2,19 @@ from __future__ import annotations -import inspect +import functools import re from collections.abc import Callable from typing import TYPE_CHECKING, Any from urllib.parse import unquote +import anyio.to_thread from pydantic import BaseModel, Field, validate_call from mcp.server.mcpserver.resources.types import FunctionResource, Resource from mcp.server.mcpserver.utilities.context_injection import find_context_parameter, inject_context from mcp.server.mcpserver.utilities.func_metadata import func_metadata +from mcp.shared._callable_inspection import is_async_callable from mcp.types import Annotations, Icon if TYPE_CHECKING: @@ -110,10 +112,11 @@ async def create_resource( # Add context to params if needed params = inject_context(self.fn, params, context, self.context_kwarg) - # Call function and check if result is a coroutine - result = self.fn(**params) - if inspect.iscoroutine(result): - 
result = await result + fn = self.fn + if is_async_callable(fn): + result = await fn(**params) + else: + result = await anyio.to_thread.run_sync(functools.partial(self.fn, **params)) return FunctionResource( uri=uri, # type: ignore diff --git a/src/mcp/server/mcpserver/resources/types.py b/src/mcp/server/mcpserver/resources/types.py index 42aecd6e3..d9e472e36 100644 --- a/src/mcp/server/mcpserver/resources/types.py +++ b/src/mcp/server/mcpserver/resources/types.py @@ -1,6 +1,7 @@ """Concrete resource implementations.""" -import inspect +from __future__ import annotations + import json from collections.abc import Callable from pathlib import Path @@ -14,6 +15,8 @@ +import anyio.to_thread from pydantic import Field, ValidationInfo, validate_call from mcp.server.mcpserver.resources.base import Resource +from mcp.shared._callable_inspection import is_async_callable from mcp.types import Annotations, Icon @@ -55,11 +57,11 @@ class FunctionResource(Resource): async def read(self) -> str | bytes: """Read the resource by calling the wrapped function.""" try: - # Call the function first to see if it returns a coroutine - result = self.fn() - # If it's a coroutine, await it - if inspect.iscoroutine(result): - result = await result + fn = self.fn + if is_async_callable(fn): + result = await fn() + else: + result = await anyio.to_thread.run_sync(self.fn) if isinstance(result, Resource): # pragma: no cover return await result.read() @@ -84,7 +86,7 @@ def from_function( icons: list[Icon] | None = None, annotations: Annotations | None = None, meta: dict[str, Any] | None = None, - ) -> "FunctionResource": + ) -> FunctionResource: """Create a FunctionResource from a function.""" func_name = name or fn.__name__ if func_name == "": # pragma: no cover diff --git a/src/mcp/server/mcpserver/server.py b/src/mcp/server/mcpserver/server.py index 2a7a58117..be77705da 100644 --- a/src/mcp/server/mcpserver/server.py +++ b/src/mcp/server/mcpserver/server.py @@ -105,6 +105,9 @@ class Settings(BaseSettings,
Generic[LifespanResultT]): # prompt settings warn_on_duplicate_prompts: bool + dependencies: list[str] + """List of dependencies to install in the server environment. Used by the `mcp install` and `mcp dev` CLI.""" + lifespan: Callable[[MCPServer[LifespanResultT]], AbstractAsyncContextManager[LifespanResultT]] | None """An async context manager that will be called when the server is started.""" @@ -137,11 +140,13 @@ def __init__( token_verifier: TokenVerifier | None = None, *, tools: list[Tool] | None = None, + resources: list[Resource] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", warn_on_duplicate_resources: bool = True, warn_on_duplicate_tools: bool = True, warn_on_duplicate_prompts: bool = True, + dependencies: list[str] | None = None, lifespan: Callable[[MCPServer[LifespanResultT]], AbstractAsyncContextManager[LifespanResultT]] | None = None, auth: AuthSettings | None = None, ): @@ -151,12 +156,16 @@ def __init__( warn_on_duplicate_resources=warn_on_duplicate_resources, warn_on_duplicate_tools=warn_on_duplicate_tools, warn_on_duplicate_prompts=warn_on_duplicate_prompts, + dependencies=dependencies or [], lifespan=lifespan, auth=auth, ) + self.dependencies = self.settings.dependencies self._tool_manager = ToolManager(tools=tools, warn_on_duplicate_tools=self.settings.warn_on_duplicate_tools) - self._resource_manager = ResourceManager(warn_on_duplicate_resources=self.settings.warn_on_duplicate_resources) + self._resource_manager = ResourceManager( + resources=resources, warn_on_duplicate_resources=self.settings.warn_on_duplicate_resources + ) self._prompt_manager = PromptManager(warn_on_duplicate_prompts=self.settings.warn_on_duplicate_prompts) self._lowlevel_server = Server( name=name or "mcp-server", diff --git a/src/mcp/server/mcpserver/tools/base.py b/src/mcp/server/mcpserver/tools/base.py index dc65be988..754313eb8 100644 --- a/src/mcp/server/mcpserver/tools/base.py +++ 
b/src/mcp/server/mcpserver/tools/base.py @@ -1,7 +1,5 @@ from __future__ import annotations -import functools -import inspect from collections.abc import Callable from functools import cached_property from typing import TYPE_CHECKING, Any @@ -11,6 +9,7 @@ from mcp.server.mcpserver.exceptions import ToolError from mcp.server.mcpserver.utilities.context_injection import find_context_parameter from mcp.server.mcpserver.utilities.func_metadata import FuncMetadata, func_metadata +from mcp.shared._callable_inspection import is_async_callable from mcp.shared.exceptions import UrlElicitationRequiredError from mcp.shared.tool_name_validation import validate_and_warn_tool_name from mcp.types import Icon, ToolAnnotations @@ -63,7 +62,7 @@ def from_function( raise ValueError("You must provide a name for lambda functions") func_doc = description or fn.__doc__ or "" - is_async = _is_async_callable(fn) + is_async = is_async_callable(fn) if context_kwarg is None: # pragma: no branch context_kwarg = find_context_parameter(fn) @@ -118,12 +117,3 @@ async def run( raise except Exception as e: raise ToolError(f"Error executing tool {self.name}: {e}") from e - - -def _is_async_callable(obj: Any) -> bool: - while isinstance(obj, functools.partial): # pragma: lax no cover - obj = obj.func - - return inspect.iscoroutinefunction(obj) or ( - callable(obj) and inspect.iscoroutinefunction(getattr(obj, "__call__", None)) - ) diff --git a/src/mcp/server/mcpserver/tools/tool_manager.py b/src/mcp/server/mcpserver/tools/tool_manager.py index 32ed54797..eef4911f9 100644 --- a/src/mcp/server/mcpserver/tools/tool_manager.py +++ b/src/mcp/server/mcpserver/tools/tool_manager.py @@ -18,18 +18,12 @@ class ToolManager: """Manages MCPServer tools.""" - def __init__( - self, - warn_on_duplicate_tools: bool = True, - *, - tools: list[Tool] | None = None, - ): + def __init__(self, warn_on_duplicate_tools: bool = True, *, tools: list[Tool] | None = None): self._tools: dict[str, Tool] = {} - if tools is not 
None: - for tool in tools: - if warn_on_duplicate_tools and tool.name in self._tools: - logger.warning(f"Tool already exists: {tool.name}") - self._tools[tool.name] = tool + for tool in tools or (): + if warn_on_duplicate_tools and tool.name in self._tools: + logger.warning(f"Tool already exists: {tool.name}") + self._tools[tool.name] = tool self.warn_on_duplicate_tools = warn_on_duplicate_tools diff --git a/src/mcp/server/mcpserver/utilities/func_metadata.py b/src/mcp/server/mcpserver/utilities/func_metadata.py index 062b47d0f..4a7610637 100644 --- a/src/mcp/server/mcpserver/utilities/func_metadata.py +++ b/src/mcp/server/mcpserver/utilities/func_metadata.py @@ -9,7 +9,7 @@ import anyio import anyio.to_thread import pydantic_core -from pydantic import BaseModel, ConfigDict, Field, WithJsonSchema, create_model +from pydantic import BaseModel, ConfigDict, Field, PydanticUserError, WithJsonSchema, create_model from pydantic.fields import FieldInfo from pydantic.json_schema import GenerateJsonSchema, JsonSchemaWarningKind from typing_extensions import is_typeddict @@ -402,9 +402,16 @@ def _try_create_model_and_schema( # Use StrictJsonSchema to raise exceptions instead of warnings try: schema = model.model_json_schema(schema_generator=StrictJsonSchema) - except (TypeError, ValueError, pydantic_core.SchemaError, pydantic_core.ValidationError) as e: + except ( + PydanticUserError, + TypeError, + ValueError, + pydantic_core.SchemaError, + pydantic_core.ValidationError, + ) as e: # These are expected errors when a type can't be converted to a Pydantic schema - # TypeError: When Pydantic can't handle the type + # PydanticUserError: When Pydantic can't handle the type (e.g. 
PydanticInvalidForJsonSchema); + # subclasses TypeError on pydantic <2.13 and RuntimeError on pydantic >=2.13 # ValueError: When there are issues with the type definition (including our custom warnings) # SchemaError: When Pydantic can't build a schema # ValidationError: When validation fails diff --git a/src/mcp/server/session.py b/src/mcp/server/session.py index ce467e6c9..20b640527 100644 --- a/src/mcp/server/session.py +++ b/src/mcp/server/session.py @@ -33,13 +33,14 @@ async def handle_list_prompts(ctx: RequestContext, params) -> ListPromptsResult: import anyio import anyio.lowlevel -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from anyio.streams.memory import MemoryObjectReceiveStream from pydantic import AnyUrl, TypeAdapter from mcp import types from mcp.server.experimental.session_features import ExperimentalServerSessionFeatures from mcp.server.models import InitializationOptions from mcp.server.validation import validate_sampling_tools, validate_tool_use_result_messages +from mcp.shared._stream_protocols import ReadStream, WriteStream from mcp.shared.exceptions import StatelessModeNotSupported from mcp.shared.experimental.tasks.capabilities import check_tasks_capability from mcp.shared.experimental.tasks.helpers import RELATED_TASK_METADATA_KEY @@ -79,8 +80,8 @@ class ServerSession( def __init__( self, - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], init_options: InitializationOptions, stateless: bool = False, ) -> None: diff --git a/src/mcp/server/sse.py b/src/mcp/server/sse.py index 9dcee67f7..48192ff61 100644 --- a/src/mcp/server/sse.py +++ b/src/mcp/server/sse.py @@ -43,7 +43,6 @@ async def handle_sse(request): from uuid import UUID, uuid4 import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream 
from pydantic import ValidationError from sse_starlette import EventSourceResponse from starlette.requests import Request @@ -55,6 +54,7 @@ async def handle_sse(request): TransportSecurityMiddleware, TransportSecuritySettings, ) +from mcp.shared._context_streams import ContextSendStream, create_context_streams from mcp.shared.message import ServerMessageMetadata, SessionMessage logger = logging.getLogger(__name__) @@ -72,7 +72,7 @@ class SseServerTransport: """ _endpoint: str - _read_stream_writers: dict[UUID, MemoryObjectSendStream[SessionMessage | Exception]] + _read_stream_writers: dict[UUID, ContextSendStream[SessionMessage | Exception]] _security: TransportSecurityMiddleware def __init__(self, endpoint: str, security_settings: TransportSecuritySettings | None = None) -> None: @@ -129,14 +129,9 @@ async def connect_sse(self, scope: Scope, receive: Receive, send: Send): # prag raise ValueError("Request validation failed") logger.debug("Setting up SSE connection") - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) session_id = uuid4() self._read_stream_writers[session_id] = read_stream_writer diff --git a/src/mcp/server/stdio.py b/src/mcp/server/stdio.py index 5ea6c4e77..5c1459dff 100644 --- a/src/mcp/server/stdio.py +++ b/src/mcp/server/stdio.py @@ -23,9 +23,9 @@ async def run_server(): import anyio import anyio.lowlevel -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from mcp import types +from 
mcp.shared._context_streams import create_context_streams from mcp.shared.message import SessionMessage @@ -43,14 +43,8 @@ async def stdio_server(stdin: anyio.AsyncFile[str] | None = None, stdout: anyio. if not stdout: stdout = anyio.wrap_file(TextIOWrapper(sys.stdout.buffer, encoding="utf-8")) - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) async def stdin_reader(): try: diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index aa99e7c88..f14201857 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -25,6 +25,8 @@ from starlette.types import Receive, Scope, Send from mcp.server.transport_security import TransportSecurityMiddleware, TransportSecuritySettings +from mcp.shared._context_streams import ContextReceiveStream, ContextSendStream, create_context_streams +from mcp.shared._stream_protocols import ReadStream, WriteStream from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.version import SUPPORTED_PROTOCOL_VERSIONS from mcp.types import ( @@ -119,10 +121,10 @@ class StreamableHTTPServerTransport: """ # Server notification streams for POST requests as well as standalone SSE stream - _read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] | None = None - _read_stream: MemoryObjectReceiveStream[SessionMessage | Exception] | None = None - _write_stream: MemoryObjectSendStream[SessionMessage] | None = None - 
_write_stream_reader: MemoryObjectReceiveStream[SessionMessage] | None = None + _read_stream_writer: ContextSendStream[SessionMessage | Exception] | None = None + _read_stream: ContextReceiveStream[SessionMessage | Exception] | None = None + _write_stream: ContextSendStream[SessionMessage] | None = None + _write_stream_reader: ContextReceiveStream[SessionMessage] | None = None _security: TransportSecurityMiddleware def __init__( @@ -954,8 +956,8 @@ async def connect( self, ) -> AsyncGenerator[ tuple[ - MemoryObjectReceiveStream[SessionMessage | Exception], - MemoryObjectSendStream[SessionMessage], + ReadStream[SessionMessage | Exception], + WriteStream[SessionMessage], ], None, ]: @@ -967,8 +969,8 @@ async def connect( # Create the memory streams for this connection - read_stream_writer, read_stream = anyio.create_memory_object_stream[SessionMessage | Exception](0) - write_stream, write_stream_reader = anyio.create_memory_object_stream[SessionMessage](0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) # Store the streams self._read_stream_writer = read_stream_writer diff --git a/src/mcp/server/websocket.py b/src/mcp/server/websocket.py index 32b50560c..277f9b5af 100644 --- a/src/mcp/server/websocket.py +++ b/src/mcp/server/websocket.py @@ -1,12 +1,12 @@ from contextlib import asynccontextmanager import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic_core import ValidationError from starlette.types import Receive, Scope, Send from starlette.websockets import WebSocket from mcp import types +from mcp.shared._context_streams import create_context_streams from mcp.shared.message import SessionMessage @@ -19,14 +19,8 @@ async def websocket_server(scope: Scope, receive: Receive, send: Send): websocket = WebSocket(scope, receive, send) await websocket.accept(subprotocol="mcp") - read_stream: 
MemoryObjectReceiveStream[SessionMessage | Exception] - read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception] - - write_stream: MemoryObjectSendStream[SessionMessage] - write_stream_reader: MemoryObjectReceiveStream[SessionMessage] - - read_stream_writer, read_stream = anyio.create_memory_object_stream(0) - write_stream, write_stream_reader = anyio.create_memory_object_stream(0) + read_stream_writer, read_stream = create_context_streams[SessionMessage | Exception](0) + write_stream, write_stream_reader = create_context_streams[SessionMessage](0) async def ws_reader(): try: diff --git a/src/mcp/shared/_callable_inspection.py b/src/mcp/shared/_callable_inspection.py new file mode 100644 index 000000000..0e89e446f --- /dev/null +++ b/src/mcp/shared/_callable_inspection.py @@ -0,0 +1,33 @@ +"""Callable inspection utilities. + +Adapted from Starlette's `is_async_callable` implementation. +https://github.com/encode/starlette/blob/main/starlette/_utils.py +""" + +from __future__ import annotations + +import functools +import inspect +from collections.abc import Awaitable, Callable +from typing import Any, TypeGuard, TypeVar, overload + +T = TypeVar("T") + +AwaitableCallable = Callable[..., Awaitable[T]] + + +@overload +def is_async_callable(obj: AwaitableCallable[T]) -> TypeGuard[AwaitableCallable[T]]: ... + + +@overload +def is_async_callable(obj: Any) -> TypeGuard[AwaitableCallable[Any]]: ... + + +def is_async_callable(obj: Any) -> Any: + while isinstance(obj, functools.partial): # pragma: lax no cover + obj = obj.func + + return inspect.iscoroutinefunction(obj) or ( + callable(obj) and inspect.iscoroutinefunction(getattr(obj, "__call__", None)) + ) diff --git a/src/mcp/shared/_context_streams.py b/src/mcp/shared/_context_streams.py new file mode 100644 index 000000000..04c33306d --- /dev/null +++ b/src/mcp/shared/_context_streams.py @@ -0,0 +1,119 @@ +"""Context-aware memory stream wrappers. 
+ +anyio memory streams do not propagate ``contextvars.Context`` across task +boundaries. These thin wrappers capture the sender's context at ``send()`` +time and expose it on the receive side via ``last_context``, so consumers +can restore it with ``ctx.run(handler, item)``. + +The iteration interface is unchanged (yields ``T``, not tuples), keeping +these wrappers duck-type compatible with plain ``MemoryObjectSendStream`` +and ``MemoryObjectReceiveStream``. +""" + +from __future__ import annotations + +import contextvars +from types import TracebackType +from typing import Any, Generic, TypeVar + +import anyio +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +T = TypeVar("T") + +# Internal payload carried through the underlying raw stream. +_Envelope = tuple[contextvars.Context, T] + + +class ContextSendStream(Generic[T]): + """Send-side wrapper that snapshots ``contextvars.copy_context()`` on every ``send()``.""" + + __slots__ = ("_inner",) + + def __init__(self, inner: MemoryObjectSendStream[_Envelope[T]]) -> None: + self._inner = inner + + async def send(self, item: T) -> None: + await self._inner.send((contextvars.copy_context(), item)) + + def close(self) -> None: + self._inner.close() + + async def aclose(self) -> None: + await self._inner.aclose() + + def clone(self) -> ContextSendStream[T]: # pragma: no cover + return ContextSendStream(self._inner.clone()) + + async def __aenter__(self) -> ContextSendStream[T]: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.aclose() + return None + + +class ContextReceiveStream(Generic[T]): + """Receive-side wrapper that yields ``T`` and stores the sender's context in ``last_context``.""" + + __slots__ = ("_inner", "last_context") + + def __init__(self, inner: MemoryObjectReceiveStream[_Envelope[T]]) -> None: + self._inner = inner + 
self.last_context: contextvars.Context | None = None + + async def receive(self) -> T: + ctx, item = await self._inner.receive() + self.last_context = ctx + return item + + def close(self) -> None: + self._inner.close() + + async def aclose(self) -> None: + await self._inner.aclose() + + def clone(self) -> ContextReceiveStream[T]: # pragma: no cover + return ContextReceiveStream(self._inner.clone()) + + def __aiter__(self) -> ContextReceiveStream[T]: + return self + + async def __anext__(self) -> T: + try: + return await self.receive() + except anyio.EndOfStream: + raise StopAsyncIteration + + async def __aenter__(self) -> ContextReceiveStream[T]: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + await self.aclose() + return None + + +class create_context_streams( + tuple[ContextSendStream[T], ContextReceiveStream[T]], +): + """Create context-aware memory object streams. + + Supports ``create_context_streams[T](n)`` bracket syntax, + matching anyio's ``create_memory_object_stream`` API style. 
+ """ + + def __new__(cls, max_buffer_size: float = 0) -> tuple[ContextSendStream[T], ContextReceiveStream[T]]: # type: ignore[type-var] + raw_send: MemoryObjectSendStream[Any] + raw_receive: MemoryObjectReceiveStream[Any] + raw_send, raw_receive = anyio.create_memory_object_stream(max_buffer_size) + return (ContextSendStream(raw_send), ContextReceiveStream(raw_receive)) diff --git a/src/mcp/shared/_otel.py b/src/mcp/shared/_otel.py new file mode 100644 index 000000000..170e873a0 --- /dev/null +++ b/src/mcp/shared/_otel.py @@ -0,0 +1,36 @@ +"""OpenTelemetry helpers for MCP.""" + +from __future__ import annotations + +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Any + +from opentelemetry.context import Context +from opentelemetry.propagate import extract, inject +from opentelemetry.trace import SpanKind, get_tracer + +_tracer = get_tracer("mcp-python-sdk") + + +@contextmanager +def otel_span( + name: str, + *, + kind: SpanKind, + attributes: dict[str, Any] | None = None, + context: Context | None = None, +) -> Iterator[Any]: + """Create an OTel span.""" + with _tracer.start_as_current_span(name, kind=kind, attributes=attributes, context=context) as span: + yield span + + +def inject_trace_context(meta: dict[str, Any]) -> None: + """Inject W3C trace context (traceparent/tracestate) into a `_meta` dict.""" + inject(meta) + + +def extract_trace_context(meta: dict[str, Any]) -> Context: + """Extract W3C trace context from a `_meta` dict.""" + return extract(meta) diff --git a/src/mcp/shared/_stream_protocols.py b/src/mcp/shared/_stream_protocols.py new file mode 100644 index 000000000..b79975132 --- /dev/null +++ b/src/mcp/shared/_stream_protocols.py @@ -0,0 +1,49 @@ +"""Stream protocols for MCP transports. + +These are general-purpose protocols satisfied by both ``MemoryObjectSendStream``/ +``MemoryObjectReceiveStream`` and the context-aware wrappers in ``_context_streams``. 
+""" + +from __future__ import annotations + +from types import TracebackType +from typing import Protocol, TypeVar + +from typing_extensions import Self + +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class ReadStream(Protocol[T_co]): + """Protocol for reading items from a stream. + + Consumers that need the sender's context should use + ``getattr(stream, 'last_context', None)``. + """ + + async def receive(self) -> T_co: ... + async def aclose(self) -> None: ... + def __aiter__(self) -> ReadStream[T_co]: ... + async def __anext__(self) -> T_co: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: ... + + +class WriteStream(Protocol[T_contra]): + """Protocol for writing items to a stream.""" + + async def send(self, item: T_contra, /) -> None: ... + async def aclose(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: ... diff --git a/src/mcp/shared/auth.py b/src/mcp/shared/auth.py index ca5b7b45a..ebf534d79 100644 --- a/src/mcp/shared/auth.py +++ b/src/mcp/shared/auth.py @@ -67,6 +67,24 @@ class OAuthClientMetadata(BaseModel): software_id: str | None = None software_version: str | None = None + @field_validator( + "client_uri", + "logo_uri", + "tos_uri", + "policy_uri", + "jwks_uri", + mode="before", + ) + @classmethod + def _empty_string_optional_url_to_none(cls, v: object) -> object: + # RFC 7591 §2 marks these URL fields OPTIONAL. Some authorization servers + # echo omitted metadata back as "" instead of dropping the keys, which + # AnyHttpUrl would otherwise reject — throwing away an otherwise valid + # registration response. Treat "" as absent. 
+ if v == "": + return None + return v + def validate_scope(self, requested_scope: str | None) -> list[str] | None: if requested_scope is None: return None diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index f2d5e2b9a..468590d09 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -5,12 +5,10 @@ from collections.abc import AsyncGenerator from contextlib import asynccontextmanager -import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - +from mcp.shared._context_streams import ContextReceiveStream, ContextSendStream, create_context_streams from mcp.shared.message import SessionMessage -MessageStream = tuple[MemoryObjectReceiveStream[SessionMessage | Exception], MemoryObjectSendStream[SessionMessage]] +MessageStream = tuple[ContextReceiveStream[SessionMessage | Exception], ContextSendStream[SessionMessage | Exception]] @asynccontextmanager @@ -22,8 +20,8 @@ async def create_client_server_memory_streams() -> AsyncGenerator[tuple[MessageS (read_stream, write_stream) """ # Create streams for both directions - server_to_client_send, server_to_client_receive = anyio.create_memory_object_stream[SessionMessage | Exception](1) - client_to_server_send, client_to_server_receive = anyio.create_memory_object_stream[SessionMessage | Exception](1) + server_to_client_send, server_to_client_receive = create_context_streams[SessionMessage | Exception](1) + client_to_server_send, client_to_server_receive = create_context_streams[SessionMessage | Exception](1) client_streams = (server_to_client_receive, client_to_server_send) server_streams = (client_to_server_receive, server_to_client_send) diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index 6fc59923f..243eef5ae 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -1,5 +1,6 @@ from __future__ import annotations +import contextvars import logging from collections.abc import Callable from contextlib import 
AsyncExitStack @@ -7,10 +8,13 @@ from typing import Any, Generic, Protocol, TypeVar import anyio -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream +from anyio.streams.memory import MemoryObjectSendStream +from opentelemetry.trace import SpanKind from pydantic import BaseModel, TypeAdapter from typing_extensions import Self +from mcp.shared._otel import inject_trace_context, otel_span +from mcp.shared._stream_protocols import ReadStream, WriteStream from mcp.shared.exceptions import MCPError from mcp.shared.message import MessageMetadata, ServerMessageMetadata, SessionMessage from mcp.shared.response_router import ResponseRouter @@ -79,11 +83,13 @@ def __init__( session: BaseSession[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT], on_complete: Callable[[RequestResponder[ReceiveRequestT, SendResultT]], Any], message_metadata: MessageMetadata = None, + context: contextvars.Context | None = None, ) -> None: self.request_id = request_id self.request_meta = request_meta self.request = request self.message_metadata = message_metadata + self.context = context self._session = session self._completed = False self._cancel_scope = anyio.CancelScope() @@ -181,8 +187,8 @@ class BaseSession( def __init__( self, - read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], - write_stream: MemoryObjectSendStream[SessionMessage], + read_stream: ReadStream[SessionMessage | Exception], + write_stream: WriteStream[SessionMessage], # If none, reading will never time out read_timeout_seconds: float | None = None, ) -> None: @@ -264,24 +270,36 @@ async def send_request( self._progress_callbacks[request_id] = progress_callback try: - jsonrpc_request = JSONRPCRequest(jsonrpc="2.0", id=request_id, **request_data) - await self._write_stream.send(SessionMessage(message=jsonrpc_request, metadata=metadata)) - - # request read timeout takes precedence over session read timeout - timeout = request_read_timeout_seconds 
or self._session_read_timeout_seconds - - try: - with anyio.fail_after(timeout): - response_or_error = await response_stream_reader.receive() - except TimeoutError: - class_name = request.__class__.__name__ - message = f"Timed out while waiting for response to {class_name}. Waited {timeout} seconds." - raise MCPError(code=REQUEST_TIMEOUT, message=message) - - if isinstance(response_or_error, JSONRPCError): - raise MCPError.from_jsonrpc_error(response_or_error) - else: - return result_type.model_validate(response_or_error.result, by_name=False) + target = request_data.get("params", {}).get("name") + span_name = f"MCP send {request.method} {target}" if target else f"MCP send {request.method}" + + with otel_span( + span_name, + kind=SpanKind.CLIENT, + attributes={"mcp.method.name": request.method, "jsonrpc.request.id": request_id}, + ): + # Inject W3C trace context into _meta (SEP-414). + meta: dict[str, Any] = request_data.setdefault("params", {}).setdefault("_meta", {}) + inject_trace_context(meta) + + jsonrpc_request = JSONRPCRequest(jsonrpc="2.0", id=request_id, **request_data) + await self._write_stream.send(SessionMessage(message=jsonrpc_request, metadata=metadata)) + + # request read timeout takes precedence over session read timeout + timeout = request_read_timeout_seconds or self._session_read_timeout_seconds + + try: + with anyio.fail_after(timeout): + response_or_error = await response_stream_reader.receive() + except TimeoutError: + class_name = request.__class__.__name__ + message = f"Timed out while waiting for response to {class_name}. Waited {timeout} seconds." 
+ raise MCPError(code=REQUEST_TIMEOUT, message=message) + + if isinstance(response_or_error, JSONRPCError): + raise MCPError.from_jsonrpc_error(response_or_error) + else: + return result_type.model_validate(response_or_error.result, by_name=False) finally: self._response_streams.pop(request_id, None) @@ -333,10 +351,10 @@ def _receive_notification_adapter(self) -> TypeAdapter[ReceiveNotificationT]: async def _receive_loop(self) -> None: async with self._read_stream, self._write_stream: try: - async for message in self._read_stream: - if isinstance(message, Exception): - await self._handle_incoming(message) - elif isinstance(message.message, JSONRPCRequest): + + async def _handle_session_message(message: SessionMessage) -> None: + sender_context: contextvars.Context | None = getattr(self._read_stream, "last_context", None) + if isinstance(message.message, JSONRPCRequest): try: validated_request = self._receive_request_adapter.validate_python( message.message.model_dump(by_alias=True, mode="json", exclude_none=True), @@ -349,6 +367,7 @@ async def _receive_loop(self) -> None: session=self, on_complete=lambda r: self._in_flight.pop(r.request_id, None), message_metadata=message.metadata, + context=sender_context, ) self._in_flight[responder.request_id] = responder await self._received_request(responder) @@ -406,6 +425,13 @@ async def _receive_loop(self) -> None: else: # Response or error await self._handle_response(message) + async for message in self._read_stream: + if isinstance(message, Exception): + await self._handle_incoming(message) + continue + + await _handle_session_message(message) + except anyio.ClosedResourceError: # This is expected when the client disconnects abruptly. 
# Without this handler, the exception would propagate up and diff --git a/tests/client/conftest.py b/tests/client/conftest.py index 2e39f1363..081e1d68e 100644 --- a/tests/client/conftest.py +++ b/tests/client/conftest.py @@ -4,15 +4,15 @@ from unittest.mock import patch import pytest -from anyio.streams.memory import MemoryObjectSendStream import mcp.shared.memory +from mcp.client._transport import WriteStream from mcp.shared.message import SessionMessage from mcp.types import JSONRPCNotification, JSONRPCRequest class SpyMemoryObjectSendStream: - def __init__(self, original_stream: MemoryObjectSendStream[SessionMessage]): + def __init__(self, original_stream: WriteStream[SessionMessage]): self.original_stream = original_stream self.sent_messages: list[SessionMessage] = [] diff --git a/tests/client/test_auth.py b/tests/client/test_auth.py index 5aa985e36..bb0bce4c9 100644 --- a/tests/client/test_auth.py +++ b/tests/client/test_auth.py @@ -2264,3 +2264,357 @@ async def callback_handler() -> tuple[str, str | None]: await auth_flow.asend(final_response) except StopAsyncIteration: pass + + +class TestSEP2207OfflineAccessScope: + """Test SEP-2207: offline_access scope augmentation for OIDC-flavored refresh tokens.""" + + def _make_as_metadata(self, scopes_supported: list[str] | None = None) -> OAuthMetadata: + return OAuthMetadata( + issuer=AnyHttpUrl("https://auth.example.com"), + authorization_endpoint=AnyHttpUrl("https://auth.example.com/authorize"), + token_endpoint=AnyHttpUrl("https://auth.example.com/token"), + scopes_supported=scopes_supported, + ) + + def _make_prm(self, scopes_supported: list[str] | None = None) -> ProtectedResourceMetadata: + return ProtectedResourceMetadata( + resource=AnyHttpUrl("https://api.example.com/v1/mcp"), + authorization_servers=[AnyHttpUrl("https://auth.example.com")], + scopes_supported=scopes_supported, + ) + + def test_offline_access_added_when_as_supports_and_client_has_refresh_token(self): + """offline_access is appended when 
AS advertises it and client supports refresh_token grant.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + asm = self._make_as_metadata(scopes_supported=["read", "write", "offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read write offline_access" + + def test_offline_access_added_with_www_authenticate_scope(self): + """offline_access is appended even when scopes come from WWW-Authenticate header.""" + asm = self._make_as_metadata(scopes_supported=["read", "write", "offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope="read write", + protected_resource_metadata=None, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read write offline_access" + + def test_offline_access_not_added_when_as_does_not_support(self): + """offline_access is not added when AS does not advertise it in scopes_supported.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + asm = self._make_as_metadata(scopes_supported=["read", "write"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read write" + + def test_offline_access_not_added_when_client_has_no_refresh_token_grant(self): + """offline_access is not added when client does not support refresh_token grant.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + asm = self._make_as_metadata(scopes_supported=["read", "write", "offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=["authorization_code"], + 
) + assert scopes == "read write" + + def test_offline_access_not_duplicated_when_already_present(self): + """offline_access is not added again if it already appears in the selected scopes.""" + prm = self._make_prm(scopes_supported=["read", "offline_access", "write"]) + asm = self._make_as_metadata(scopes_supported=["read", "write", "offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read offline_access write" + + def test_offline_access_not_added_when_no_scopes_selected(self): + """offline_access is not added when no base scopes are available (None).""" + asm = self._make_as_metadata(scopes_supported=["offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=None, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + # When AS scopes are the only source and include offline_access, + # the base scope is "offline_access" and no duplication happens + assert scopes == "offline_access" + + def test_offline_access_not_added_when_as_scopes_supported_is_none(self): + """offline_access is not added when AS scopes_supported is None.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + asm = self._make_as_metadata(scopes_supported=None) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read write" + + def test_offline_access_not_added_when_no_as_metadata(self): + """offline_access is not added when AS metadata is not available.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + 
protected_resource_metadata=prm, + authorization_server_metadata=None, + client_grant_types=["authorization_code", "refresh_token"], + ) + assert scopes == "read write" + + def test_offline_access_not_added_when_no_grant_types_provided(self): + """offline_access is not added when client_grant_types is None.""" + prm = self._make_prm(scopes_supported=["read", "write"]) + asm = self._make_as_metadata(scopes_supported=["read", "write", "offline_access"]) + + scopes = get_client_metadata_scopes( + www_authenticate_scope=None, + protected_resource_metadata=prm, + authorization_server_metadata=asm, + client_grant_types=None, + ) + assert scopes == "read write" + + def test_default_client_metadata_includes_refresh_token_grant(self): + """Default OAuthClientMetadata includes refresh_token in grant_types (SEP-2207 guidance).""" + metadata = OAuthClientMetadata(redirect_uris=[AnyUrl("http://localhost:3030/callback")]) + assert "refresh_token" in metadata.grant_types + + @pytest.mark.anyio + async def test_auth_flow_adds_offline_access_when_as_advertises( + self, client_metadata: OAuthClientMetadata, mock_storage: MockTokenStorage + ): + """E2E: auth flow includes offline_access in authorization request when AS advertises it.""" + + captured_auth_url: str | None = None + captured_state: str | None = None + + async def redirect_handler(url: str) -> None: + nonlocal captured_auth_url, captured_state + captured_auth_url = url + parsed = urlparse(url) + params = parse_qs(parsed.query) + captured_state = params.get("state", [None])[0] + + async def callback_handler() -> tuple[str, str | None]: + return "test_auth_code", captured_state + + provider = OAuthClientProvider( + server_url="https://api.example.com/v1/mcp", + client_metadata=client_metadata, + storage=mock_storage, + redirect_handler=redirect_handler, + callback_handler=callback_handler, + ) + + provider.context.current_tokens = None + provider.context.token_expiry_time = None + provider._initialized = True + + # Pre-set 
client info to skip DCR + provider.context.client_info = OAuthClientInformationFull( + client_id="test_client", + client_secret="test_secret", + redirect_uris=[AnyUrl("http://localhost:3030/callback")], + ) + + test_request = httpx.Request("GET", "https://api.example.com/v1/mcp") + auth_flow = provider.async_auth_flow(test_request) + + # First request + request = await auth_flow.__anext__() + assert "Authorization" not in request.headers + + # Send 401 + response = httpx.Response(401, headers={}, request=test_request) + + # PRM discovery + prm_request = await auth_flow.asend(response) + prm_response = httpx.Response( + 200, + content=( + b'{"resource": "https://api.example.com/v1/mcp",' + b' "authorization_servers": ["https://auth.example.com"],' + b' "scopes_supported": ["read", "write"]}' + ), + request=prm_request, + ) + + # OAuth metadata discovery - AS advertises offline_access + oauth_request = await auth_flow.asend(prm_response) + oauth_response = httpx.Response( + 200, + content=( + b'{"issuer": "https://auth.example.com",' + b' "authorization_endpoint": "https://auth.example.com/authorize",' + b' "token_endpoint": "https://auth.example.com/token",' + b' "scopes_supported": ["read", "write", "offline_access"]}' + ), + request=oauth_request, + ) + + # This triggers authorization, which calls redirect_handler + token_request = await auth_flow.asend(oauth_response) + + # Verify the authorization URL included offline_access in the scope + assert captured_auth_url is not None + parsed = urlparse(captured_auth_url) + params = parse_qs(parsed.query) + scope_value = params["scope"][0] + scope_parts = scope_value.split() + assert "offline_access" in scope_parts + assert "read" in scope_parts + assert "write" in scope_parts + + # OIDC requires prompt=consent when offline_access is requested + assert params["prompt"][0] == "consent" + + # Complete the token exchange + token_response = httpx.Response( + 200, + content=( + b'{"access_token": "new_access_token", 
"token_type": "Bearer",' + b' "expires_in": 3600, "refresh_token": "new_refresh_token"}' + ), + request=token_request, + ) + + final_request = await auth_flow.asend(token_response) + assert final_request.headers["Authorization"] == "Bearer new_access_token" + + # Close the generator + final_response = httpx.Response(200, request=final_request) + try: + await auth_flow.asend(final_response) + except StopAsyncIteration: + pass + + @pytest.mark.anyio + async def test_auth_flow_no_offline_access_when_as_does_not_advertise( + self, client_metadata: OAuthClientMetadata, mock_storage: MockTokenStorage + ): + """E2E: auth flow does NOT include offline_access when AS doesn't advertise it.""" + + captured_auth_url: str | None = None + captured_state: str | None = None + + async def redirect_handler(url: str) -> None: + nonlocal captured_auth_url, captured_state + captured_auth_url = url + parsed = urlparse(url) + params = parse_qs(parsed.query) + captured_state = params.get("state", [None])[0] + + async def callback_handler() -> tuple[str, str | None]: + return "test_auth_code", captured_state + + provider = OAuthClientProvider( + server_url="https://api.example.com/v1/mcp", + client_metadata=client_metadata, + storage=mock_storage, + redirect_handler=redirect_handler, + callback_handler=callback_handler, + ) + + provider.context.current_tokens = None + provider.context.token_expiry_time = None + provider._initialized = True + + # Pre-set client info to skip DCR + provider.context.client_info = OAuthClientInformationFull( + client_id="test_client", + client_secret="test_secret", + redirect_uris=[AnyUrl("http://localhost:3030/callback")], + ) + + test_request = httpx.Request("GET", "https://api.example.com/v1/mcp") + auth_flow = provider.async_auth_flow(test_request) + + # First request + await auth_flow.__anext__() + + # Send 401 + response = httpx.Response(401, headers={}, request=test_request) + + # PRM discovery + prm_request = await auth_flow.asend(response) + 
prm_response = httpx.Response( + 200, + content=( + b'{"resource": "https://api.example.com/v1/mcp",' + b' "authorization_servers": ["https://auth.example.com"],' + b' "scopes_supported": ["read", "write"]}' + ), + request=prm_request, + ) + + # OAuth metadata discovery - AS does NOT advertise offline_access + oauth_request = await auth_flow.asend(prm_response) + oauth_response = httpx.Response( + 200, + content=( + b'{"issuer": "https://auth.example.com",' + b' "authorization_endpoint": "https://auth.example.com/authorize",' + b' "token_endpoint": "https://auth.example.com/token",' + b' "scopes_supported": ["read", "write"]}' + ), + request=oauth_request, + ) + + # This triggers authorization, which calls redirect_handler + token_request = await auth_flow.asend(oauth_response) + + # Verify the authorization URL does NOT include offline_access + assert captured_auth_url is not None + parsed = urlparse(captured_auth_url) + params = parse_qs(parsed.query) + scope_value = params["scope"][0] + scope_parts = scope_value.split() + assert "offline_access" not in scope_parts + assert "read" in scope_parts + assert "write" in scope_parts + + # prompt=consent should NOT be present without offline_access + assert "prompt" not in params + + # Complete the token exchange + token_response = httpx.Response( + 200, + content=b'{"access_token": "new_access_token", "token_type": "Bearer", "expires_in": 3600}', + request=token_request, + ) + + final_request = await auth_flow.asend(token_response) + assert final_request.headers["Authorization"] == "Bearer new_access_token" + + # Close the generator + final_response = httpx.Response(200, request=final_request) + try: + await auth_flow.asend(final_response) + except StopAsyncIteration: + pass diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 18368e6bb..ac52a9024 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -2,6 +2,9 @@ from __future__ import annotations +import contextvars 
+from collections.abc import Iterator +from contextlib import contextmanager from unittest.mock import patch import anyio @@ -320,3 +323,33 @@ async def test_client_uses_transport_directly(app: MCPServer): structured_content={"result": "Hello, Transport!"}, ) ) + + +_TEST_CONTEXTVAR = contextvars.ContextVar("test_var", default="initial") + + +@contextmanager +def _set_test_contextvar(value: str) -> Iterator[None]: + token = _TEST_CONTEXTVAR.set(value) + try: + yield + finally: + _TEST_CONTEXTVAR.reset(token) + + +async def test_context_propagation(): + """Sender's contextvars.Context is propagated to the server handler.""" + server = MCPServer("test") + + @server.tool() + async def check_context() -> str: + """Return the contextvar value visible to the handler.""" + return _TEST_CONTEXTVAR.get() + + async with Client(server) as client: + with _set_test_contextvar("client_value"): + result = await client.call_tool("check_context", {}) + + assert result.content[0].text == "client_value", ( # type: ignore[union-attr] + "Server handler did not see the sender's contextvars.Context" + ) diff --git a/tests/client/test_logging_callback.py b/tests/client/test_logging_callback.py index 1598fd55f..454c1d338 100644 --- a/tests/client/test_logging_callback.py +++ b/tests/client/test_logging_callback.py @@ -1,4 +1,4 @@ -from typing import Any, Literal +from typing import Literal import pytest @@ -36,24 +36,20 @@ async def test_tool_with_log( message: str, level: Literal["debug", "info", "warning", "error"], logger: str, ctx: Context ) -> bool: """Send a log notification to the client.""" - await ctx.log(level=level, message=message, logger_name=logger) + await ctx.log(level=level, data=message, logger_name=logger) return True - @server.tool("test_tool_with_log_extra") - async def test_tool_with_log_extra( - message: str, + @server.tool("test_tool_with_log_dict") + async def test_tool_with_log_dict( level: Literal["debug", "info", "warning", "error"], logger: str, - extra_string: 
str, - extra_dict: dict[str, Any], ctx: Context, ) -> bool: - """Send a log notification to the client with extra fields.""" + """Send a log notification with a dict payload.""" await ctx.log( level=level, - message=message, + data={"message": "Test log message", "extra_string": "example", "extra_dict": {"a": 1, "b": 2, "c": 3}}, logger_name=logger, - extra={"extra_string": extra_string, "extra_dict": extra_dict}, ) return True @@ -84,18 +80,15 @@ async def message_handler( "logger": "test_logger", }, ) - log_result_with_extra = await client.call_tool( - "test_tool_with_log_extra", + log_result_with_dict = await client.call_tool( + "test_tool_with_log_dict", { - "message": "Test log message", "level": "info", "logger": "test_logger", - "extra_string": "example", - "extra_dict": {"a": 1, "b": 2, "c": 3}, }, ) assert log_result.is_error is False - assert log_result_with_extra.is_error is False + assert log_result_with_dict.is_error is False assert len(logging_collector.log_messages) == 2 # Create meta object with related_request_id added dynamically log = logging_collector.log_messages[0] @@ -103,10 +96,10 @@ async def message_handler( assert log.logger == "test_logger" assert log.data == "Test log message" - log_with_extra = logging_collector.log_messages[1] - assert log_with_extra.level == "info" - assert log_with_extra.logger == "test_logger" - assert log_with_extra.data == { + log_with_dict = logging_collector.log_messages[1] + assert log_with_dict.level == "info" + assert log_with_dict.logger == "test_logger" + assert log_with_dict.data == { "message": "Test log message", "extra_string": "example", "extra_dict": {"a": 1, "b": 2, "c": 3}, diff --git a/tests/server/mcpserver/prompts/test_base.py b/tests/server/mcpserver/prompts/test_base.py index fe18e91bd..d4e4e6b5a 100644 --- a/tests/server/mcpserver/prompts/test_base.py +++ b/tests/server/mcpserver/prompts/test_base.py @@ -1,3 +1,4 @@ +import threading from typing import Any import pytest @@ -190,3 +191,21 @@ 
async def fn() -> dict[str, Any]: ) ) ] + + +@pytest.mark.anyio +async def test_sync_fn_runs_in_worker_thread(): + """Sync prompt functions must run in a worker thread, not the event loop.""" + + main_thread = threading.get_ident() + fn_thread: list[int] = [] + + def blocking_fn() -> str: + fn_thread.append(threading.get_ident()) + return "hello" + + prompt = Prompt.from_function(blocking_fn) + messages = await prompt.render(None, Context()) + + assert messages == [UserMessage(content=TextContent(type="text", text="hello"))] + assert fn_thread[0] != main_thread diff --git a/tests/server/mcpserver/resources/test_function_resources.py b/tests/server/mcpserver/resources/test_function_resources.py index 5f5c216ed..c1ff96061 100644 --- a/tests/server/mcpserver/resources/test_function_resources.py +++ b/tests/server/mcpserver/resources/test_function_resources.py @@ -1,3 +1,7 @@ +import threading + +import anyio +import anyio.from_thread import pytest from pydantic import BaseModel @@ -190,3 +194,51 @@ def get_data() -> str: # pragma: no cover ) assert resource.meta is None + + +@pytest.mark.anyio +async def test_sync_fn_runs_in_worker_thread(): + """Sync resource functions must run in a worker thread, not the event loop.""" + + main_thread = threading.get_ident() + fn_thread: list[int] = [] + + def blocking_fn() -> str: + fn_thread.append(threading.get_ident()) + return "data" + + resource = FunctionResource(uri="resource://test", name="test", fn=blocking_fn) + result = await resource.read() + + assert result == "data" + assert fn_thread[0] != main_thread + + +@pytest.mark.anyio +async def test_sync_fn_does_not_block_event_loop(): + """A blocking sync resource function must not stall the event loop. + + On regression (sync runs inline), anyio.from_thread.run_sync raises + RuntimeError because there is no worker-thread context, failing fast. 
+ """ + handler_entered = anyio.Event() + release = threading.Event() + + def blocking_fn() -> str: + anyio.from_thread.run_sync(handler_entered.set) + release.wait() + return "done" + + resource = FunctionResource(uri="resource://test", name="test", fn=blocking_fn) + result: list[str | bytes] = [] + + async def run() -> None: + result.append(await resource.read()) + + with anyio.fail_after(5): + async with anyio.create_task_group() as tg: + tg.start_soon(run) + await handler_entered.wait() + release.set() + + assert result == ["done"] diff --git a/tests/server/mcpserver/resources/test_resource_manager.py b/tests/server/mcpserver/resources/test_resource_manager.py index 724b57997..b91c71581 100644 --- a/tests/server/mcpserver/resources/test_resource_manager.py +++ b/tests/server/mcpserver/resources/test_resource_manager.py @@ -1,5 +1,5 @@ +import logging from pathlib import Path -from tempfile import NamedTemporaryFile import pytest from pydantic import AnyUrl @@ -8,170 +8,134 @@ from mcp.server.mcpserver.resources import FileResource, FunctionResource, ResourceManager, ResourceTemplate -@pytest.fixture -def temp_file(): +@pytest.fixture() +def temp_file(tmp_path: Path): """Create a temporary file for testing. File is automatically cleaned up after the test if it still exists. 
""" - content = "test content" - with NamedTemporaryFile(mode="w", delete=False) as f: - f.write(content) - path = Path(f.name).resolve() - yield path - try: # pragma: lax no cover - path.unlink() - except FileNotFoundError: # pragma: lax no cover - pass # File was already deleted by the test - - -class TestResourceManager: - """Test ResourceManager functionality.""" - - def test_add_resource(self, temp_file: Path): - """Test adding a resource.""" - manager = ResourceManager() - resource = FileResource( - uri=f"file://{temp_file}", - name="test", - path=temp_file, - ) - added = manager.add_resource(resource) - assert added == resource - assert manager.list_resources() == [resource] - - def test_add_duplicate_resource(self, temp_file: Path): - """Test adding the same resource twice.""" - manager = ResourceManager() - resource = FileResource( - uri=f"file://{temp_file}", - name="test", - path=temp_file, - ) - first = manager.add_resource(resource) - second = manager.add_resource(resource) - assert first == second - assert manager.list_resources() == [resource] - - def test_warn_on_duplicate_resources(self, temp_file: Path, caplog: pytest.LogCaptureFixture): - """Test warning on duplicate resources.""" - manager = ResourceManager() - resource = FileResource( - uri=f"file://{temp_file}", - name="test", - path=temp_file, - ) - manager.add_resource(resource) - manager.add_resource(resource) - assert "Resource already exists" in caplog.text - - def test_disable_warn_on_duplicate_resources(self, temp_file: Path, caplog: pytest.LogCaptureFixture): - """Test disabling warning on duplicate resources.""" - manager = ResourceManager(warn_on_duplicate_resources=False) - resource = FileResource( - uri=f"file://{temp_file}", - name="test", - path=temp_file, - ) - manager.add_resource(resource) - manager.add_resource(resource) - assert "Resource already exists" not in caplog.text - - @pytest.mark.anyio - async def test_get_resource(self, temp_file: Path): - """Test getting a 
resource by URI.""" - manager = ResourceManager() - resource = FileResource( - uri=f"file://{temp_file}", - name="test", - path=temp_file, - ) - manager.add_resource(resource) - retrieved = await manager.get_resource(resource.uri, Context()) - assert retrieved == resource - - @pytest.mark.anyio - async def test_get_resource_from_template(self): - """Test getting a resource through a template.""" - manager = ResourceManager() - - def greet(name: str) -> str: - return f"Hello, {name}!" - - template = ResourceTemplate.from_function( - fn=greet, - uri_template="greet://{name}", - name="greeter", - ) - manager._templates[template.uri_template] = template - - resource = await manager.get_resource(AnyUrl("greet://world"), Context()) - assert isinstance(resource, FunctionResource) - content = await resource.read() - assert content == "Hello, world!" - - @pytest.mark.anyio - async def test_get_unknown_resource(self): - """Test getting a non-existent resource.""" - manager = ResourceManager() - with pytest.raises(ValueError, match="Unknown resource"): - await manager.get_resource(AnyUrl("unknown://test"), Context()) - - def test_list_resources(self, temp_file: Path): - """Test listing all resources.""" - manager = ResourceManager() - resource1 = FileResource( - uri=f"file://{temp_file}", - name="test1", - path=temp_file, - ) - resource2 = FileResource( - uri=f"file://{temp_file}2", - name="test2", - path=temp_file, - ) - manager.add_resource(resource1) - manager.add_resource(resource2) - resources = manager.list_resources() - assert len(resources) == 2 - assert resources == [resource1, resource2] - - -class TestResourceManagerMetadata: - """Test ResourceManager Metadata""" - - def test_add_template_with_metadata(self): - """Test that ResourceManager.add_template() accepts and passes meta parameter.""" - - manager = ResourceManager() - - def get_item(id: str) -> str: # pragma: no cover - return f"Item {id}" - - metadata = {"source": "database", "cached": True} - - template = 
manager.add_template( - fn=get_item, - uri_template="resource://items/{id}", - meta=metadata, - ) - - assert template.meta is not None - assert template.meta == metadata - assert template.meta["source"] == "database" - assert template.meta["cached"] is True - - def test_add_template_without_metadata(self): - """Test that ResourceManager.add_template() works without meta parameter.""" - - manager = ResourceManager() - - def get_item(id: str) -> str: # pragma: no cover - return f"Item {id}" - - template = manager.add_template( - fn=get_item, - uri_template="resource://items/{id}", - ) - - assert template.meta is None + tmp_file = tmp_path / "file" + tmp_file.touch() + yield tmp_file + + +def test_init_with_resources(temp_file: Path, caplog: pytest.LogCaptureFixture): + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + manager = ResourceManager(resources=[resource]) + assert manager.list_resources() == [resource] + + duplicate_resource = FileResource(uri=f"file://{temp_file}", name="duplicate", path=temp_file) + + with caplog.at_level(logging.WARNING): + manager = ResourceManager(True, resources=[resource, duplicate_resource]) + + assert "Resource already exists" in caplog.text + assert manager.list_resources() == [resource] + + +def test_add_resource(temp_file: Path): + """Test adding a resource.""" + manager = ResourceManager() + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + added = manager.add_resource(resource) + assert added == resource + assert manager.list_resources() == [resource] + + +def test_add_duplicate_resource(temp_file: Path): + """Test adding the same resource twice.""" + manager = ResourceManager() + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + first = manager.add_resource(resource) + second = manager.add_resource(resource) + assert first == second + assert manager.list_resources() == [resource] + + +def test_warn_on_duplicate_resources(temp_file: 
Path, caplog: pytest.LogCaptureFixture): + """Test warning on duplicate resources.""" + manager = ResourceManager() + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + manager.add_resource(resource) + manager.add_resource(resource) + assert "Resource already exists" in caplog.text + + +def test_disable_warn_on_duplicate_resources(temp_file: Path, caplog: pytest.LogCaptureFixture): + """Test disabling warning on duplicate resources.""" + manager = ResourceManager(warn_on_duplicate_resources=False) + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + manager.add_resource(resource) + manager.add_resource(resource) + assert "Resource already exists" not in caplog.text + + +@pytest.mark.anyio +async def test_get_resource(temp_file: Path): + """Test getting a resource by URI.""" + manager = ResourceManager() + resource = FileResource(uri=f"file://{temp_file}", name="test", path=temp_file) + manager.add_resource(resource) + retrieved = await manager.get_resource(resource.uri, Context()) + assert retrieved == resource + + +@pytest.mark.anyio +async def test_get_resource_from_template(): + """Test getting a resource through a template.""" + manager = ResourceManager() + + def greet(name: str) -> str: + return f"Hello, {name}!" + + template = ResourceTemplate.from_function(fn=greet, uri_template="greet://{name}", name="greeter") + manager._templates[template.uri_template] = template + + resource = await manager.get_resource(AnyUrl("greet://world"), Context()) + assert isinstance(resource, FunctionResource) + content = await resource.read() + assert content == "Hello, world!" 
+ + +@pytest.mark.anyio +async def test_get_unknown_resource(): + """Test getting a non-existent resource.""" + manager = ResourceManager() + with pytest.raises(ValueError, match="Unknown resource"): + await manager.get_resource(AnyUrl("unknown://test"), Context()) + + +def test_list_resources(temp_file: Path): + """Test listing all resources.""" + manager = ResourceManager() + resource1 = FileResource(uri=f"file://{temp_file}", name="test1", path=temp_file) + resource2 = FileResource(uri=f"file://{temp_file}2", name="test2", path=temp_file) + + manager.add_resource(resource1) + manager.add_resource(resource2) + + resources = manager.list_resources() + assert len(resources) == 2 + assert resources == [resource1, resource2] + + +def get_item(id: str) -> str: ... + + +def test_add_template_with_metadata(): + """Test that ResourceManager.add_template() accepts and passes meta parameter.""" + manager = ResourceManager() + metadata = {"source": "database", "cached": True} + template = manager.add_template(fn=get_item, uri_template="resource://items/{id}", meta=metadata) + + assert template.meta is not None + assert template.meta == metadata + assert template.meta["source"] == "database" + assert template.meta["cached"] is True + + +def test_add_template_without_metadata(): + """Test that ResourceManager.add_template() works without meta parameter.""" + manager = ResourceManager() + template = manager.add_template(fn=get_item, uri_template="resource://items/{id}") + assert template.meta is None diff --git a/tests/server/mcpserver/resources/test_resource_template.py b/tests/server/mcpserver/resources/test_resource_template.py index 640cfe803..2a7ba8d50 100644 --- a/tests/server/mcpserver/resources/test_resource_template.py +++ b/tests/server/mcpserver/resources/test_resource_template.py @@ -1,4 +1,5 @@ import json +import threading from typing import Any import pytest @@ -310,3 +311,22 @@ def get_item(item_id: str) -> str: assert resource.meta == metadata assert 
resource.meta["category"] == "inventory" assert resource.meta["cacheable"] is True + + +@pytest.mark.anyio +async def test_sync_fn_runs_in_worker_thread(): + """Sync template functions must run in a worker thread, not the event loop.""" + + main_thread = threading.get_ident() + fn_thread: list[int] = [] + + def blocking_fn(name: str) -> str: + fn_thread.append(threading.get_ident()) + return f"hello {name}" + + template = ResourceTemplate.from_function(fn=blocking_fn, uri_template="test://{name}") + resource = await template.create_resource("test://world", {"name": "world"}, Context()) + + assert isinstance(resource, FunctionResource) + assert await resource.read() == "hello world" + assert fn_thread[0] != main_thread diff --git a/tests/server/mcpserver/test_server.py b/tests/server/mcpserver/test_server.py index 3ef06d038..3457ec944 100644 --- a/tests/server/mcpserver/test_server.py +++ b/tests/server/mcpserver/test_server.py @@ -65,6 +65,15 @@ async def test_create_server(self): assert len(mcp.icons) == 1 assert mcp.icons[0].src == "https://example.com/icon.png" + def test_dependencies(self): + """Dependencies list is read by `mcp install` / `mcp dev` CLI commands.""" + mcp = MCPServer("test", dependencies=["pandas", "numpy"]) + assert mcp.dependencies == ["pandas", "numpy"] + assert mcp.settings.dependencies == ["pandas", "numpy"] + + mcp_no_deps = MCPServer("test") + assert mcp_no_deps.dependencies == [] + async def test_sse_app_returns_starlette_app(self): """Test that sse_app returns a Starlette application with correct routes.""" mcp = MCPServer("test") @@ -676,6 +685,32 @@ async def test_remove_tool_and_call(self): class TestServerResources: + async def test_init_with_resources(self): + def get_text() -> str: + """Seeded resource.""" + return "Hello from init!" 
+ + resource = FunctionResource.from_function(fn=get_text, uri="resource://init", name="init_resource") + + mcp = MCPServer(resources=[resource]) + + async with Client(mcp) as client: + assert client.initialize_result.capabilities.resources is not None + + resources = await client.list_resources() + assert len(resources.resources) == 1 + listed = resources.resources[0] + assert listed.uri == "resource://init" + assert listed.name == "init_resource" + assert listed.description == "Seeded resource." + + result = await client.read_resource("resource://init") + + assert len(result.contents) == 1 + content = result.contents[0] + assert isinstance(content, TextResourceContents) + assert content.text == "Hello from init!" + async def test_text_resource(self): mcp = MCPServer() diff --git a/tests/shared/test_auth.py b/tests/shared/test_auth.py index cd3c35332..7463bc5a8 100644 --- a/tests/shared/test_auth.py +++ b/tests/shared/test_auth.py @@ -1,6 +1,9 @@ """Tests for OAuth 2.0 shared code.""" -from mcp.shared.auth import OAuthMetadata +import pytest +from pydantic import ValidationError + +from mcp.shared.auth import OAuthClientInformationFull, OAuthClientMetadata, OAuthMetadata def test_oauth(): @@ -58,3 +61,80 @@ def test_oauth_with_jarm(): "token_endpoint_auth_methods_supported": ["client_secret_basic", "client_secret_post"], } ) + + +# RFC 7591 §2 marks client_uri/logo_uri/tos_uri/policy_uri/jwks_uri as OPTIONAL. +# Some authorization servers echo the client's omitted metadata back as "" +# instead of dropping the keys; without coercion, AnyHttpUrl rejects "" and +# the whole registration response is thrown away even though the server +# returned a valid client_id. 
+ + +@pytest.mark.parametrize( + "empty_field", + ["client_uri", "logo_uri", "tos_uri", "policy_uri", "jwks_uri"], +) +def test_optional_url_empty_string_coerced_to_none(empty_field: str): + data = { + "redirect_uris": ["https://example.com/callback"], + empty_field: "", + } + metadata = OAuthClientMetadata.model_validate(data) + assert getattr(metadata, empty_field) is None + + +def test_all_optional_urls_empty_together(): + data = { + "redirect_uris": ["https://example.com/callback"], + "client_uri": "", + "logo_uri": "", + "tos_uri": "", + "policy_uri": "", + "jwks_uri": "", + } + metadata = OAuthClientMetadata.model_validate(data) + assert metadata.client_uri is None + assert metadata.logo_uri is None + assert metadata.tos_uri is None + assert metadata.policy_uri is None + assert metadata.jwks_uri is None + + +def test_valid_url_passes_through_unchanged(): + data = { + "redirect_uris": ["https://example.com/callback"], + "client_uri": "https://udemy.com/", + } + metadata = OAuthClientMetadata.model_validate(data) + assert str(metadata.client_uri) == "https://udemy.com/" + + +def test_information_full_inherits_coercion(): + """OAuthClientInformationFull subclasses OAuthClientMetadata, so the + same coercion applies to DCR responses parsed via the full model.""" + data = { + "client_id": "abc123", + "redirect_uris": ["https://example.com/callback"], + "client_uri": "", + "logo_uri": "", + "tos_uri": "", + "policy_uri": "", + "jwks_uri": "", + } + info = OAuthClientInformationFull.model_validate(data) + assert info.client_id == "abc123" + assert info.client_uri is None + assert info.logo_uri is None + assert info.tos_uri is None + assert info.policy_uri is None + assert info.jwks_uri is None + + +def test_invalid_non_empty_url_still_rejected(): + """Coercion must only touch empty strings — garbage URLs still raise.""" + data = { + "redirect_uris": ["https://example.com/callback"], + "client_uri": "not a url", + } + with pytest.raises(ValidationError): + 
OAuthClientMetadata.model_validate(data) diff --git a/tests/shared/test_otel.py b/tests/shared/test_otel.py new file mode 100644 index 000000000..ec7ff78cc --- /dev/null +++ b/tests/shared/test_otel.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import pytest +from logfire.testing import CaptureLogfire + +from mcp import types +from mcp.client.client import Client +from mcp.server.mcpserver import MCPServer + +pytestmark = pytest.mark.anyio + + +# Logfire warns about propagated trace context by default (distributed_tracing=None). +# This is expected here since we're testing cross-boundary context propagation. +@pytest.mark.filterwarnings("ignore::RuntimeWarning") +async def test_client_and_server_spans(capfire: CaptureLogfire): + """Verify that calling a tool produces client and server spans with correct attributes.""" + server = MCPServer("test") + + @server.tool() + def greet(name: str) -> str: + """Greet someone.""" + return f"Hello, {name}!" + + async with Client(server) as client: + result = await client.call_tool("greet", {"name": "World"}) + + assert isinstance(result.content[0], types.TextContent) + assert result.content[0].text == "Hello, World!" + + spans = capfire.exporter.exported_spans_as_dict() + span_names = {s["name"] for s in spans} + + assert "MCP send tools/call greet" in span_names + assert "MCP handle tools/call greet" in span_names + + client_span = next(s for s in spans if s["name"] == "MCP send tools/call greet") + server_span = next(s for s in spans if s["name"] == "MCP handle tools/call greet") + + assert client_span["attributes"]["mcp.method.name"] == "tools/call" + assert server_span["attributes"]["mcp.method.name"] == "tools/call" + + # Server span should be in the same trace as the client span (context propagation). 
+ assert server_span["context"]["trace_id"] == client_span["context"]["trace_id"] diff --git a/tests/shared/test_streamable_http.py b/tests/shared/test_streamable_http.py index f8ca30441..3d5770fb6 100644 --- a/tests/shared/test_streamable_http.py +++ b/tests/shared/test_streamable_http.py @@ -45,6 +45,7 @@ from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings from mcp.shared._context import RequestContext +from mcp.shared._context_streams import create_context_streams from mcp.shared._httpx_utils import ( MCP_DEFAULT_SSE_READ_TIMEOUT, MCP_DEFAULT_TIMEOUT, @@ -1783,8 +1784,8 @@ async def test_handle_sse_event_skips_empty_data(): # Create a mock SSE event with empty data (keep-alive ping) mock_sse = ServerSentEvent(event="message", data="", id=None, retry=None) - # Create a mock stream writer - write_stream, read_stream = anyio.create_memory_object_stream[SessionMessage | Exception](1) + # Create a context-aware stream writer (matches StreamWriter type alias) + write_stream, read_stream = create_context_streams[SessionMessage | Exception](1) try: # Call _handle_sse_event with empty data - should return False and not raise @@ -1794,8 +1795,9 @@ async def test_handle_sse_event_skips_empty_data(): assert result is False # Nothing should have been written to the stream - # Check buffer is empty (statistics().current_buffer_used returns buffer size) - assert write_stream.statistics().current_buffer_used == 0 + with pytest.raises(TimeoutError): + with anyio.fail_after(0): + await read_stream.receive() finally: await write_stream.aclose() await read_stream.aclose() diff --git a/uv.lock b/uv.lock index 4af3532ea..705d014aa 100644 --- a/uv.lock +++ b/uv.lock @@ -448,62 +448,62 @@ toml = [ [[package]] name = "cryptography" -version = "46.0.5" +version = "46.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 
'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, - { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, - { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, - { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, - { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, 
upload-time = "2026-02-10T19:17:15.618Z" }, - { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, - { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, - { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, - { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, - { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, - { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, - { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, - { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, - { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, - { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, - { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, - { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, - { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, - { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, - { url = 
"https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, - { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, - { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, - { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, - { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, - { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, - { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, - { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, - { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, - { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, - { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, - { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, + { url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" }, + { url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, + { url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, + { url = "https://files.pythonhosted.org/packages/63/0c/dca8abb64e7ca4f6b2978769f6fea5ad06686a190cec381f0a796fdcaaba/cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f", size = 3476879, upload-time = "2026-04-08T01:57:38.664Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ea/075aac6a84b7c271578d81a2f9968acb6e273002408729f2ddff517fed4a/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15", size = 4219700, upload-time = "2026-04-08T01:57:40.625Z" }, + { url = "https://files.pythonhosted.org/packages/6c/7b/1c55db7242b5e5612b29fc7a630e91ee7a6e3c8e7bf5406d22e206875fbd/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455", size = 4385982, upload-time = "2026-04-08T01:57:42.725Z" }, + { url = "https://files.pythonhosted.org/packages/cb/da/9870eec4b69c63ef5925bf7d8342b7e13bc2ee3d47791461c4e49ca212f4/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65", size = 4219115, upload-time = "2026-04-08T01:57:44.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/72/05aa5832b82dd341969e9a734d1812a6aadb088d9eb6f0430fc337cc5a8f/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968", size = 4385479, upload-time = "2026-04-08T01:57:46.86Z" }, + { url = "https://files.pythonhosted.org/packages/20/2a/1b016902351a523aa2bd446b50a5bc1175d7a7d1cf90fe2ef904f9b84ebc/cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4", size = 3412829, upload-time = "2026-04-08T01:57:48.874Z" }, ] [[package]] @@ -579,6 +579,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" }, ] +[[package]] +name = "googleapis-common-protos" +version = "1.73.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/c0/4a54c386282c13449eca8bbe2ddb518181dc113e78d240458a68856b4d69/googleapis_common_protos-1.73.1.tar.gz", hash = "sha256:13114f0e9d2391756a0194c3a8131974ed7bffb06086569ba193364af59163b6", size = 147506, upload-time = "2026-03-26T22:17:38.451Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/82/fcb6520612bec0c39b973a6c0954b6a0d948aadfe8f7e9487f60ceb8bfa6/googleapis_common_protos-1.73.1-py3-none-any.whl", hash = "sha256:e51f09eb0a43a8602f5a915870972e6b4a394088415c79d79605a46d8e826ee8", size = 297556, upload-time = "2026-03-26T22:15:58.455Z" }, +] + [[package]] name = "griffe" version = "1.14.0" @@ -646,6 +658,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -710,6 +734,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] +[[package]] +name = "logfire" +version = "4.31.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "executing" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-sdk" }, + { name = "protobuf" }, + { name = "rich" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/fc/21f923243d8c3ca2ebfa97de46970ced734e66ac634c1c35b6abb41300f1/logfire-4.31.0.tar.gz", hash = "sha256:361bfda17c9d70ada5d220211033bae06b871ddac9d5b06978bc0ceca6b8e658", size = 1080609, upload-time = 
"2026-03-27T19:00:46.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/1a/8c860e35bf847ac0d647d94bad89dccbb66cbcafdd61d8334f8cc7cfdd58/logfire-4.31.0-py3-none-any.whl", hash = "sha256:49fad38b5e6f199a98e9c8814e860c8a42595bb81479b52a20413e53ee475b72", size = 308896, upload-time = "2026-03-27T19:00:43.107Z" }, +] + [[package]] name = "markdown" version = "3.9" @@ -797,6 +840,7 @@ dependencies = [ { name = "httpx" }, { name = "httpx-sse" }, { name = "jsonschema" }, + { name = "opentelemetry-api" }, { name = "pydantic" }, { name = "pydantic-settings" }, { name = "pyjwt", extra = ["crypto"] }, @@ -826,6 +870,7 @@ dev = [ { name = "coverage", extra = ["toml"] }, { name = "dirty-equals" }, { name = "inline-snapshot" }, + { name = "logfire" }, { name = "mcp", extra = ["cli", "ws"] }, { name = "pillow" }, { name = "pyright" }, @@ -850,9 +895,10 @@ docs = [ [package.metadata] requires-dist = [ { name = "anyio", specifier = ">=4.9" }, - { name = "httpx", specifier = ">=0.27.1" }, + { name = "httpx", specifier = ">=0.27.1,<1.0.0" }, { name = "httpx-sse", specifier = ">=0.4" }, { name = "jsonschema", specifier = ">=4.20.0" }, + { name = "opentelemetry-api", specifier = ">=1.28.0" }, { name = "pydantic", specifier = ">=2.12.0" }, { name = "pydantic-settings", specifier = ">=2.5.2" }, { name = "pyjwt", extras = ["crypto"], specifier = ">=2.10.1" }, @@ -876,6 +922,7 @@ dev = [ { name = "coverage", extras = ["toml"], specifier = ">=7.10.7,<=7.13" }, { name = "dirty-equals", specifier = ">=0.9.0" }, { name = "inline-snapshot", specifier = ">=0.23.0" }, + { name = "logfire", specifier = ">=3.0.0" }, { name = "mcp", extras = ["cli", "ws"], editable = "." 
}, { name = "pillow", specifier = ">=12.0" }, { name = "pyright", specifier = ">=1.1.400" }, @@ -1642,6 +1689,103 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/9d/22d241b66f7bbde88a3bfa6847a351d2c46b84de23e71222c6aae25c7050/opentelemetry_exporter_otlp_proto_common-1.39.1.tar.gz", hash = "sha256:763370d4737a59741c89a67b50f9e39271639ee4afc999dadfe768541c027464", size = 20409, upload-time = "2025-12-11T13:32:40.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/02/ffc3e143d89a27ac21fd557365b98bd0653b98de8a101151d5805b5d4c33/opentelemetry_exporter_otlp_proto_common-1.39.1-py3-none-any.whl", hash = 
"sha256:08f8a5862d64cc3435105686d0216c1365dc5701f86844a8cd56597d0c764fde", size = 18366, upload-time = "2025-12-11T13:32:20.2Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/04/2a08fa9c0214ae38880df01e8bfae12b067ec0793446578575e5080d6545/opentelemetry_exporter_otlp_proto_http-1.39.1.tar.gz", hash = "sha256:31bdab9745c709ce90a49a0624c2bd445d31a28ba34275951a6a362d16a0b9cb", size = 17288, upload-time = "2025-12-11T13:32:42.029Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = 
"sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" }, +] + [[package]] name = "outcome" version = "1.3.0.post0" @@ -1797,6 +1941,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = "sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = 
"sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = "https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = "sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, +] + [[package]] name = "pycparser" version = "2.23" @@ -2235,7 +2394,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.5" +version = "2.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -2243,9 +2402,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, ] [[package]] @@ -2766,3 +2925,81 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/a1/dcb68430b1d00b698ae7a7e0194433bce4f07ded185f0ee5fb21e2a2e91e/websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122", size = 176884, upload-time = "2025-03-05T20:03:27.934Z" }, { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, ] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/23/bb82321b86411eb51e5a5db3fb8f8032fd30bd7c2d74bfe936136b2fa1d6/wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04", size = 53482, upload-time = "2025-08-12T05:51:44.467Z" }, + { url = "https://files.pythonhosted.org/packages/45/69/f3c47642b79485a30a59c63f6d739ed779fb4cc8323205d047d741d55220/wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2", size = 38676, upload-time = "2025-08-12T05:51:32.636Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/e7e7f5670c1eafd9e990438e69d8fb46fa91a50785332e06b560c869454f/wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c", size = 38957, upload-time = "2025-08-12T05:51:54.655Z" }, + { url = "https://files.pythonhosted.org/packages/de/17/9f8f86755c191d6779d7ddead1a53c7a8aa18bccb7cea8e7e72dfa6a8a09/wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775", size = 81975, upload-time = "2025-08-12T05:52:30.109Z" }, + { url = "https://files.pythonhosted.org/packages/f2/15/dd576273491f9f43dd09fce517f6c2ce6eb4fe21681726068db0d0467096/wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd", size = 83149, upload-time = "2025-08-12T05:52:09.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/c4/5eb4ce0d4814521fee7aa806264bf7a114e748ad05110441cd5b8a5c744b/wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05", size = 82209, upload-time = "2025-08-12T05:52:10.331Z" }, + { url = "https://files.pythonhosted.org/packages/31/4b/819e9e0eb5c8dc86f60dfc42aa4e2c0d6c3db8732bce93cc752e604bb5f5/wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418", size = 81551, upload-time = "2025-08-12T05:52:31.137Z" }, + { url = "https://files.pythonhosted.org/packages/f8/83/ed6baf89ba3a56694700139698cf703aac9f0f9eb03dab92f57551bd5385/wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390", size = 36464, upload-time = "2025-08-12T05:53:01.204Z" }, + { url = "https://files.pythonhosted.org/packages/2f/90/ee61d36862340ad7e9d15a02529df6b948676b9a5829fd5e16640156627d/wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6", size = 38748, upload-time = "2025-08-12T05:53:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c3/cefe0bd330d389c9983ced15d326f45373f4073c9f4a8c2f99b50bfea329/wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18", size = 36810, upload-time = "2025-08-12T05:52:51.906Z" }, + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = 
"sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = 
"2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", 
size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, 
+ { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]