diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index f92a64d..648a235 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -16,17 +16,6 @@ jobs: - "3.11" - "3.12" - "3.13" - # redis service - services: - redis: - image: redis:latest - ports: - - 6379:6379 - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 env: TERM: 'dumb' steps: @@ -47,4 +36,4 @@ jobs: uv run pyright streaq/ tests/ example.py - name: Test with pytest run: | - uv run pytest -n auto --cov=streaq --cov-report=term-missing tests/ --cov-fail-under=95 + PYTHON_VERSION=${{ matrix.python-version }} docker compose run --rm tests uv run --locked --all-extras --dev pytest -n auto --dist=loadgroup --cov=streaq tests/ diff --git a/.gitignore b/.gitignore index 3c97fae..c0c84a2 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ tmp.py # C extensions *.so +data/ # Distribution / packaging .Python diff --git a/Makefile b/Makefile index adde0d3..d338abb 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,10 @@ lint: uv run pyright streaq/ tests/ example.py test: - uv run pytest -n auto --cov=streaq --cov-report=term-missing --cov-fail-under=95 + PYTHON_VERSION=3.10 docker compose run --rm tests uv run --locked --all-extras --dev pytest -n auto --dist=loadgroup --cov=streaq tests/ docs: uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs/ docs/_build/ + +cleanup: + docker compose down --remove-orphans diff --git a/README.md b/README.md index 853a9c5..4ce7ee9 100644 --- a/README.md +++ b/README.md @@ -6,12 +6,12 @@ # streaQ -Fast, async, type-safe distributed task queue via Redis streams +Fast, async, fully-typed distributed task queue via Redis streams ## Features - Up to [5x faster](https://github.com/tastyware/streaq/tree/master/benchmarks) than `arq` -- Strongly typed +- Fully typed - 95%+ unit test coverage - Comprehensive documentation - Support for delayed/scheduled tasks @@ -23,7 +23,7 @@ Fast, async, 
type-safe distributed task queue via Redis streams - Support for synchronous tasks (run in separate threads) - Redis Sentinel support for production - Built-in web UI for monitoring tasks -- Built with structured concurrency on `anyio` +- Built with structured concurrency on `anyio`, supports both `asyncio` and `trio` ## Installation @@ -56,14 +56,15 @@ async def cronjob() -> None: print("Nobody respects the spammish repetition!") ``` -Finally, let's queue up some tasks: +Finally, let's initialize the worker and queue up some tasks: ```python -await sleeper.enqueue(3) -# enqueue returns a task object that can be used to get results/info -task = await sleeper.enqueue(1).start(delay=3) -print(await task.info()) -print(await task.result(timeout=5)) +async with worker: + await sleeper.enqueue(3) + # enqueue returns a task object that can be used to get results/info + task = await sleeper.enqueue(1).start(delay=3) + print(await task.info()) + print(await task.result(timeout=5)) ``` Putting this all together gives us [example.py](https://github.com/tastyware/streaq/blob/master/example.py). 
Let's spin up a worker: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..1c590ff --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,182 @@ +services: + redis-master: + image: redis:latest + container_name: redis-master + hostname: redis-master + ports: + - "6379:6379" + volumes: + - ./data/master:/data + command: + [ + "redis-server", + "--appendonly", + "yes", + "--repl-diskless-load", + "on-empty-db", + "--protected-mode", + "no" + ] + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 6379 ping | grep -q PONG"] + interval: 2s + timeout: 3s + retries: 15 + start_period: 5s + + slave-1: + image: redis:latest + container_name: slave-1 + hostname: slave-1 + depends_on: + - redis-master + ports: + - "6380:6379" + volumes: + - ./data/slave1:/data + command: + [ + "redis-server", + "--appendonly", + "yes", + "--replicaof", + "redis-master", + "6379", + "--repl-diskless-load", + "on-empty-db", + "--protected-mode", + "no" + ] + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 6379 --raw INFO replication | grep -q '^role:slave' && redis-cli -p 6379 --raw INFO replication | grep -q 'master_link_status:up'"] + interval: 3s + timeout: 4s + retries: 15 + start_period: 15s + + slave-2: + image: redis:latest + container_name: slave-2 + hostname: slave-2 + depends_on: + - redis-master + ports: + - "6381:6379" + volumes: + - ./data/slave2:/data + command: + [ + "redis-server", + "--appendonly", + "yes", + "--replicaof", + "redis-master", + "6379", + "--repl-diskless-load", + "on-empty-db", + "--protected-mode", + "no" + ] + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 6379 --raw INFO replication | grep -q '^role:slave' && redis-cli -p 6379 --raw INFO replication | grep -q 'master_link_status:up'"] + interval: 3s + timeout: 4s + retries: 15 + start_period: 15s + + sentinel-1: + image: redis:latest + container_name: sentinel-1 + hostname: sentinel-1 + depends_on: + - redis-master + ports: + - "26379:26379" + command: > + sh -c 'echo "bind 
0.0.0.0" > /etc/sentinel.conf && + echo "sentinel monitor mymaster redis-master 6379 2" >> /etc/sentinel.conf && + echo "sentinel resolve-hostnames yes" >> /etc/sentinel.conf && + echo "sentinel down-after-milliseconds mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel failover-timeout mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel parallel-syncs mymaster 1" >> /etc/sentinel.conf && + redis-sentinel /etc/sentinel.conf' + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 26379 SENTINEL ckquorum mymaster | grep -q ^OK"] + interval: 3s + timeout: 4s + retries: 60 + start_period: 10s + + sentinel-2: + image: redis:latest + container_name: sentinel-2 + hostname: sentinel-2 + depends_on: + - redis-master + ports: + - "26380:26379" + command: > + sh -c 'echo "bind 0.0.0.0" > /etc/sentinel.conf && + echo "sentinel monitor mymaster redis-master 6379 2" >> /etc/sentinel.conf && + echo "sentinel resolve-hostnames yes" >> /etc/sentinel.conf && + echo "sentinel down-after-milliseconds mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel failover-timeout mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel parallel-syncs mymaster 1" >> /etc/sentinel.conf && + redis-sentinel /etc/sentinel.conf' + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 26379 SENTINEL ckquorum mymaster | grep -q ^OK"] + interval: 3s + timeout: 4s + retries: 60 + start_period: 10s + + sentinel-3: + image: redis:latest + container_name: sentinel-3 + hostname: sentinel-3 + depends_on: + - redis-master + ports: + - "26381:26379" + command: > + sh -c 'echo "bind 0.0.0.0" > /etc/sentinel.conf && + echo "sentinel monitor mymaster redis-master 6379 2" >> /etc/sentinel.conf && + echo "sentinel resolve-hostnames yes" >> /etc/sentinel.conf && + echo "sentinel down-after-milliseconds mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel failover-timeout mymaster 10000" >> /etc/sentinel.conf && + echo "sentinel parallel-syncs mymaster 1" >> /etc/sentinel.conf && + redis-sentinel 
/etc/sentinel.conf' + healthcheck: + test: ["CMD-SHELL", "redis-cli -p 26379 SENTINEL ckquorum mymaster | grep -q ^OK"] + interval: 3s + timeout: 4s + retries: 60 + start_period: 10s + + tests: + image: ghcr.io/astral-sh/uv:debian + environment: + - UV_PYTHON=${PYTHON_VERSION} + - UV_LINK_MODE=copy + - UV_PYTHON_CACHE_DIR=/root/.cache/uv/python + working_dir: /app + volumes: + - .:/app + - /app/.venv + - uv-cache:/root/.cache/uv + depends_on: + redis-master: + condition: service_healthy + sentinel-1: + condition: service_healthy + sentinel-2: + condition: service_healthy + sentinel-3: + condition: service_healthy + slave-1: + condition: service_healthy + slave-2: + condition: service_healthy + +volumes: + uv-cache: {} # named volume for uv/pip caches diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 0000000..c800b21 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1,50 @@ +Contributing +============ + +Development +----------- + +Contributions to streaQ are always welcome! Most development tasks are in the included ``Makefile``: + +- ``make install``: set up the linting environment +- ``make lint``: run ruff to check formatting and pyright to check types +- ``make test``: use the included ``docker-compose.yml`` file to spin up Redis and Sentinel containers, then run test suite. This uses caching so it's faster after the first run. You'll need Docker and compose installed. +- ``make docs``: build the documentation pages with Sphinx +- ``make cleanup``: tear down running Docker containers + +If you need to test individual tests instead of the entire suite, you can do this: + +.. code-block:: bash + + PYTHON_VERSION=3.10 docker compose run --rm tests uv run --locked --all-extras --dev pytest -sk 'test_name' + +Benchmarks +---------- + +If you want to run the benchmarks yourself, first install the dependencies: + +.. code-block:: bash + + uv add "streaq[benchmark]" + +You can enqueue jobs like so: + +.. 
code-block:: bash + + python benchmarks/bench_streaq.py --time 1 + +Here, ``time`` is the number of seconds to sleep per task. + +You can run a worker with one of these commands, adjusting the number of workers as desired: + +.. code-block:: bash + + arq --workers ? --burst bench_arq.WorkerSettings + saq --quiet bench_saq.settings --workers ? + streaq --burst --workers ? bench_streaq.worker + taskiq worker --workers ? --max-async-tasks 32 bench_taskiq:broker --max-prefetch 32 + +Donating +-------- + +If you're interested in supporting the ongoing development of this project, donations are welcome! You can do so through GitHub: https://github.com/sponsors/tastyware diff --git a/docs/getting-started.rst b/docs/getting-started.rst index e7068bd..23d0372 100644 --- a/docs/getting-started.rst +++ b/docs/getting-started.rst @@ -7,7 +7,7 @@ To start, you'll need to create a ``Worker`` object. At worker creation, you can from contextlib import asynccontextmanager from dataclasses import dataclass - from typing import AsyncIterator + from typing import AsyncGenerator from httpx import AsyncClient from streaq import Worker @@ -20,7 +20,7 @@ To start, you'll need to create a ``Worker`` object. At worker creation, you can http_client: AsyncClient @asynccontextmanager - async def lifespan() -> AsyncIterator[WorkerContext]: + async def lifespan() -> AsyncGenerator[WorkerContext, None]: """ Here, we initialize the worker's dependencies. You can also do any startup/shutdown work here @@ -40,15 +40,16 @@ You can then register async tasks with the worker like this: res = await worker.context.http_client.get(url) return len(res.text) -Finally, let's queue up some tasks: +Finally, let's queue up some tasks via the worker's async context manager: .. 
code-block:: python - await fetch.enqueue("https://tastyware.dev/") - # enqueue returns a task object that can be used to get results/info - task = await fetch.enqueue("https://github.com/tastyware/streaq").start(delay=3) - print(await task.info()) - print(await task.result(timeout=5)) + async with worker: + await fetch.enqueue("https://tastyware.dev/") + # enqueue returns a task object that can be used to get results/info + task = await fetch.enqueue("https://github.com/tastyware/streaq").start(delay=3) + print(await task.info()) + print(await task.result(timeout=5)) Put this all together in a script and spin up a worker: diff --git a/docs/index.rst b/docs/index.rst index 7959ada..ff03cc1 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -28,6 +28,8 @@ Fast, async, type-safe job queuing with Redis streams +----------------------------+--------+-----+-----+--------+ | Task middleware | ✅ | ✅ | ✅ | ✅ | +----------------------------+--------+-----+-----+--------+ +| Web UI available | ✅ | ✅ | ✅ | ✅ | ++----------------------------+--------+-----+-----+--------+ | Actively maintained | ✅ | ❌ | ✅ | ✅ | +----------------------------+--------+-----+-----+--------+ | Custom serializers | ✅ | ✅ | ❌ | ✅ | @@ -48,10 +50,10 @@ Fast, async, type-safe job queuing with Redis streams +----------------------------+--------+-----+-----+--------+ | Redis Sentinel support | ✅ | ❌ | ❌ | ✅ | +----------------------------+--------+-----+-----+--------+ -| Web UI available | ✅ | ✅ | ✅ | ✅ | -+----------------------------+--------+-----+-----+--------+ | Structured concurrency | ❌ | ❌ | ❌ | ✅ | +----------------------------+--------+-----+-----+--------+ +| Trio support | ❌ | ❌ | ❌ | ✅ | ++----------------------------+--------+-----+-----+--------+ .. toctree:: :maxdepth: 2 @@ -65,6 +67,7 @@ Fast, async, type-safe job queuing with Redis streams middleware cli integrations + contributing .. 
toctree:: :maxdepth: 2 diff --git a/docs/integrations.rst b/docs/integrations.rst index 38f74a7..ef5826e 100644 --- a/docs/integrations.rst +++ b/docs/integrations.rst @@ -8,19 +8,33 @@ Integration with FastAPI is straightforward: .. code-block:: python - from fastapi import FastAPI + from fastapi import FastAPI, HTTPException, status from example import fetch - app = FastAPI() + @asynccontextmanager + async def app_lifespan(app: FastAPI) -> AsyncGenerator[None]: + async with worker: + yield - @app.post("/enqueue") - async def enqueue(url: str) -> bool: - task = await fetch.enqueue(url) - res = await task.result(5) - return res.success + app = FastAPI(lifespan=app_lifespan) -Here, we're building off of the ``fetch`` task defined in :doc:`Getting started `. But what if the backend doesn't have access to the task definitions? + @app.post("/fetch") + async def do_fetch(url: str) -> int: + task = await fetch.enqueue(url) + try: + res = await task.result(5) + except TimeoutError as e: + raise HTTPException( + status_code=status.HTTP_408_REQUEST_TIMEOUT, detail="Timed out!" + ) + if not res.success: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail="Task failed!" + ) + return res.result + +Here, we're building off of the ``fetch`` task defined in :doc:`Getting started `. As you can imagine, integrating with other frameworks should be very similar! Separating enqueuing from task definitions ------------------------------------------ @@ -44,7 +58,8 @@ Now, tasks can be enqueued in the same way as before: .. code-block:: python - await fetch.enqueue("https://github.com/tastyware/streaq") + async with worker: + await fetch.enqueue("https://github.com/tastyware/streaq") .. 
warning:: @@ -60,7 +75,8 @@ The second way is to use ``Worker.enqueue_unsafe``: # signing key, and queue name as the worker defined elsewhere worker = Worker(redis_url="redis://localhost:6379") - await worker.enqueue_unsafe("fetch", "https://tastyware.dev") + async with worker: + await worker.enqueue_unsafe("fetch", "https://tastyware.dev") This method is not type-safe, but it doesn't require you to re-define the task signature in the backend. Here, the first parameter is the ``fn_name`` of the task defined elsewhere, and the rest of the args and kwargs can be passed normally. @@ -75,7 +91,7 @@ With a little work the UI can be mounted as a part of an existing FastAPI applic from streaq.ui import get_worker, router - app = FastAPI() + app = FastAPI(lifespan=app_lifespan) # see above, we need the worker to be initialized app.dependency_overrides[get_worker] = lambda: worker # here, you can add any auth-related dependencies as well app.include_router(router, prefix="/streaq", dependencies=[...]) diff --git a/docs/task.rst b/docs/task.rst index 6ef4434..2c02f61 100644 --- a/docs/task.rst +++ b/docs/task.rst @@ -4,9 +4,7 @@ Tasks Task execution -------------- -streaQ preserves arq's task execution model, called "pessimistic execution": tasks aren’t removed from the queue until they’ve either succeeded or failed. If the worker shuts down, the task will be cancelled immediately and will remain in the queue to be run again when the worker starts up again (or gets run by another worker which is still running). - -In the case of a catastrophic failure (that is, the worker shuts down abruptly without doing cleanup), tasks can still be retried, as workers will refresh timeouts for running tasks every ``Worker.idle_timeout`` seconds. +streaQ preserves arq's task execution model called "pessimistic execution": tasks aren’t removed from the queue until they’ve either succeeded or failed. 
If the worker shuts down, the task will remain in the queue to be picked up by another worker. ``Worker.idle_timeout`` controls how often task liveness is updated (and consequently, how quickly failed tasks can be retried). All streaQ tasks should therefore be designed to cope with being called repeatedly if they’re cancelled. If necessary, use database transactions, idempotency keys or Redis to mark when non-repeatable work has completed to avoid doing it twice. @@ -16,7 +14,7 @@ All streaQ tasks should therefore be designed to cope with being called repeated streaQ handles exceptions in the following manner: * ``StreaqRetry`` exceptions result in retrying the task, sometimes after a delay (see below). -* ``asyncio.CancelledError`` exceptions result in the task failing if the task was aborted by the user, or being retried if the worker was shut down unexpectedly. +* ``asyncio.CancelledError`` or ``trio.Cancelled`` exceptions result in the task failing if the task was aborted by the user, or being retried if the worker was shut down unexpectedly. * ``TimeoutError`` exceptions result in the task failing if the task took too long to run. * Any other ``Exception`` will result in the task failing. @@ -34,15 +32,17 @@ We can now register async functions with the worker: .. 
code-block:: python + from anyio import sleep # you can just as well use asyncio or trio + @worker.task() async def sleeper(time: int) -> int: - await asyncio.sleep(time) + await sleep(time) return time The ``task`` decorator has several optional arguments that can be used to customize behavior: -- ``expire``: time after which to dequeue the task, if None will never be dequeued -- ``max_tries``: maximum number of attempts before giving up if task is retried; defaults to 3 +- ``expire``: time after which to dequeue the task, if ``None`` will never be dequeued +- ``max_tries``: maximum number of attempts before giving up if task is retried; defaults to ``3`` - ``name``: use a custom name for the task instead of the function name - ``silent``: whether to silence task startup/shutdown logs and task success/failure tracking; defaults to False - ``timeout``: amount of time to run the task before raising ``TimeoutError``; ``None`` (the default) means never timeout @@ -52,13 +52,14 @@ The ``task`` decorator has several optional arguments that can be used to custom Enqueuing tasks --------------- -Once registered, tasks can then be queued up for execution by worker processes, with full type safety: +Once registered, tasks can then be queued up for execution by worker processes (with full type safety!) using the worker's async context manager: .. 
code-block:: python - # these two are equivalent - await sleeper.enqueue(5) - await sleeper.enqueue(5).start() + async with worker: + # these two are equivalent + await sleeper.enqueue(5) + await sleeper.enqueue(5).start() We can also defer task execution to a later time: @@ -66,16 +67,21 @@ We can also defer task execution to a later time: from datetime import datetime - await sleeper.enqueue(3).start(delay=10) # start after 10 seconds - await sleeper.enqueue(3).start(schedule=datetime(...)) # start at a specific time + async with worker: + await sleeper.enqueue(3).start(delay=10) # start after 10 seconds + await sleeper.enqueue(3).start(schedule=datetime(...)) # start at a specific time Tasks can depend on other tasks, meaning they won't be enqueued until their dependencies have finished successfully. If the dependency fails, the dependent task will not be enqueued. .. code-block:: python - task1 = await sleeper.enqueue(1) - task2 = await sleeper.enqueue(2).start(after=task1.id) - task3 = await sleeper.enqueue(3).start(after=[task1.id, task2.id]) + async with worker: + task1 = await sleeper.enqueue(1) + task2 = await sleeper.enqueue(2).start(after=task1.id) + task3 = await sleeper.enqueue(3).start(after=[task1.id, task2.id]) + +.. note:: + ``Task.enqueue()`` is actually a sync function that returns a ``Task`` object. Since ``Task`` is awaitable, it gets enqueued when awaited. Therefore, you should always use await even though ``Task.enqueue()`` is sync, unless you're enqueuing by batch (see below). Task priorities --------------- @@ -89,7 +95,8 @@ By passing the ``priorities`` argument on worker creation, you can create an arb # this list should be ordered from lowest to highest worker = Worker(priorities=["low", "high"]) - await sleeper.enqueue(3).start(priority="low") + async with worker: + await sleeper.enqueue(3).start(priority="low") Here's an example that demonstrates how priorities work. 
Note that the low priority task is enqueued first, but the high priority task is executed first. (Make sure to run this *before* starting the worker!) @@ -105,8 +112,9 @@ Here's an example that demonstrates how priorities work. Note that the low prior async def high() -> None: print("High priority task") - await low.enqueue().start(priority="low") - await high.enqueue().start(priority="high") + async with worker: + await low.enqueue().start(priority="low") + await high.enqueue().start(priority="high") Enqueuing by batch ------------------ @@ -117,7 +125,8 @@ For most cases, the above method of enqueuing tasks is sufficient. However, stre # importantly, we're not using `await` here tasks = [sleeper.enqueue(i) for i in range(10)] - await worker.enqueue_many(tasks) + async with worker: + await worker.enqueue_many(tasks) Running tasks locally --------------------- @@ -139,10 +148,11 @@ Enqueued tasks return a ``Task`` object which can be used to wait for task resul from datetime import timedelta - task = await sleeper.enqueue(3).start(delay=timedelta(seconds=5)) - print(await task.status()) - print(await task.result()) - print(await task.status()) + async with worker: + task = await sleeper.enqueue(3).start(delay=timedelta(seconds=5)) + print(await task.status()) + print(await task.result()) + print(await task.status()) .. code-block:: python @@ -228,7 +238,8 @@ Note that if the task waiting for its completion is cancelled, the thread will s return seconds # here we use await, the wrapper does the magic for us! - task = await sync_sleep.enqueue(1) + async with worker: + task = await sync_sleep.enqueue(1) print(await task.result(3)) Task dependency graph @@ -240,9 +251,10 @@ Dependencies can be specified using the ``after`` parameter of the ``Task.start` .. 
code-block:: python - task1 = await sleeper.enqueue(1) - task2 = await sleeper.enqueue(2).start(after=task1.id) - task3 = await sleeper.enqueue(3).start(after=[task1.id, task2.id]) + async with worker: + task1 = await sleeper.enqueue(1) + task2 = await sleeper.enqueue(2).start(after=task1.id) + task3 = await sleeper.enqueue(3).start(after=[task1.id, task2.id]) And the dependency failing will cause dependent tasks to fail as well: @@ -256,9 +268,10 @@ And the dependency failing will cause dependent tasks to fail as well: async def do_nothing() -> None: pass - task = await foobar.enqueue().start() - dep = await do_nothing.enqueue().start(after=task.id) - print(await dep.result(3)) + async with worker: + task = await foobar.enqueue().start() + dep = await do_nothing.enqueue().start(after=task.id) + print(await dep.result(3)) Task pipelining --------------- @@ -299,28 +312,29 @@ This is useful for ETL pipelines or similar tasks, where each task builds upon t async def map(data: Sequence[Any], to: str) -> list[Any]: task = worker.registry[to] coros = [task.enqueue(*to_tuple(d)).start() for d in data] - tasks = await asyncio.gather(*coros) - results = await asyncio.gather(*[t.result(3) for t in tasks]) + tasks = await gather(*coros) + results = await gather(*[t.result(3) for t in tasks]) return [r.result for r in results] @worker.task() async def filter(data: Sequence[Any], by: str) -> list[Any]: task = worker.registry[by] coros = [task.enqueue(*to_tuple(d)).start() for d in data] - tasks = await asyncio.gather(*coros) - results = await asyncio.gather(*[t.result(5) for t in tasks]) + tasks = await gather(*coros) + results = await gather(*[t.result(5) for t in tasks]) return [data[i] for i in range(len(data)) if results[i].result] - data = [0, 1, 2, 3] - t1 = await map.enqueue(data, to=double.fn_name).then(filter, by=is_even.fn_name) - print(await t1.result()) - t2 = await filter.enqueue(data, by=is_even.fn_name).then(map, to=double.fn_name) - print(await t2.result()) + 
async with worker: + data = [0, 1, 2, 3] + t1 = await map.enqueue(data, to=double.fn_name).then(filter, by=is_even.fn_name) + print(await t1.result()) + t2 = await filter.enqueue(data, by=is_even.fn_name).then(map, to=double.fn_name) + print(await t2.result()) .. code-block:: python TaskResult(fn_name='filter', enqueue_time=1751712228859, success=True, result=[0, 2, 4, 6], start_time=1751712228895, finish_time=1751712228919, tries=1, worker_id='ca5bd9eb') TaskResult(fn_name='map', enqueue_time=1751712228923, success=True, result=[0, 4], start_time=1751712228951, finish_time=1751712228966, tries=1, worker_id='ca5bd9eb') -.. note:: +.. warning:: For pipelined tasks, positional arguments must all come from the previous task (tuple outputs will be unpacked), and any additional arguments can be passed as kwargs to ``then()``. diff --git a/docs/worker.rst b/docs/worker.rst index d41c38e..03b9874 100644 --- a/docs/worker.rst +++ b/docs/worker.rst @@ -28,16 +28,16 @@ Next, create an async context manager to run at worker creation/teardown. Use th .. 
code-block:: python from contextlib import asynccontextmanager - from typing import AsyncIterator + from typing import AsyncGenerator from streaq import Worker @asynccontextmanager - async def lifespan() -> AsyncIterator[WorkerContext]: + async def lifespan() -> AsyncGenerator[WorkerContext, None]: # here we run code if desired after worker start up # yield our dependencies as an instance of the class async with AsyncClient() as http_client: yield WorkerContext(http_client) - # here we run code if desired before worker shutdown + # here we run code if desired before worker shutdown Now, tasks created for the worker will have access to the dependencies like so: @@ -83,28 +83,33 @@ Other configuration options ``Worker`` accepts a variety of other configuration options: - ``redis_url``: the URI for connecting to your Redis instance -- ``concurrency``: the maximum number of tasks the worker can run concurrently; by default, this also controls the number of tasks which will be pre-fetched by the worker +- ``redis_kwargs``: additional arguments for Redis connections +- ``concurrency``: the maximum number of tasks the worker can run concurrently - ``sync_concurrency``: the maximum number of tasks the worker can run simultaneously in separate threads; defaults to the same as ``concurrency`` +- ``queue_name``: name of the queue in Redis, can be used to create multiple queues at once +- ``priorities``: a list of custom priorities for tasks, ordered from lowest to highest - ``prefetch``: the number of tasks to pre-fetch from Redis, defaults to ``concurrency``. You can set this to ``0`` to disable prefetching entirely. 
- ``tz``: ``tzinfo`` controlling the time zone for the worker's cron scheduler and logs -- ``queue_name``: name of the queue in Redis, can be used to create multiple queues at once -- ``health_check_interval``: how often to log info about worker and Redis health (also stored in Redis) +- ``handle_signals``: whether to handle signals for graceful shutdown (unavailable on Windows) +- ``health_crontab``: crontab for frequency to store worker health in Redis - ``idle_timeout``: the amount of time to wait before re-enqueuing idle tasks (either prefetched tasks that don't run, or running tasks that become unresponsive) -- ``priorities``: a list of custom priorities for tasks, ordered from lowest to highest +- ``anyio_backend``: either trio or asyncio, defaults to asyncio +- ``anyio_kwargs``: extra arguments for anyio, see documentation `here `_ +- ``sentinel_kwargs``: extra arguments to pass to sentinel connections (see below) Deploying with Redis Sentinel ----------------------------- -In production environments, oftentimes high availability guarantees are needed, which is why Redis Sentinel was created. streaQ allows you to use Redis Sentinel easily: +In production environments, high availability guarantees are often needed, which is why Redis Sentinel was created. streaQ allows you to use Redis Sentinel easily: .. 
code-block:: python worker = Worker( redis_sentinel_master="mymaster", redis_sentinel_nodes=[ - ("localhost", 26379), - ("localhost", 26380), - ("localhost", 26381), + ("sentinel-1", 26379), + ("sentinel-2", 26379), + ("sentinel-3", 26379), ], ) diff --git a/example.py b/example.py index 15f5ee2..d94769f 100644 --- a/example.py +++ b/example.py @@ -1,4 +1,4 @@ -import asyncio +from anyio import run, sleep from streaq import Worker @@ -7,7 +7,7 @@ @worker.task() async def sleeper(time: int) -> int: - await asyncio.sleep(time) + await sleep(time) return time @@ -17,12 +17,13 @@ async def cronjob() -> None: async def main() -> None: - await sleeper.enqueue(3) - # enqueue returns a task object that can be used to get results/info - task = await sleeper.enqueue(1).start(delay=3) - print(await task.info()) - print(await task.result(timeout=5)) + async with worker: + await sleeper.enqueue(3) + # enqueue returns a task object that can be used to get results/info + task = await sleeper.enqueue(1).start(delay=3) + print(await task.info()) + print(await task.result(timeout=5)) if __name__ == "__main__": - asyncio.run(main()) + run(main) diff --git a/pyproject.toml b/pyproject.toml index 0137a85..dba49d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ exclude = ["**/__pycache__/**"] [project] name = "streaq" -description = "Fast, async, type-safe job queuing with Redis streams" +description = "Fast, async, fully-typed distributed task queue via Redis streams" readme = "README.md" classifiers = [ "Development Status :: 5 - Production/Stable", @@ -25,6 +25,7 @@ classifiers = [ "Framework :: Django", "Framework :: FastAPI", "Framework :: Flask", + "Framework :: Trio", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", @@ -55,7 +56,7 @@ authors = [ ] dependencies = [ "anyio>=4.10.0", - "coredis>=5.0.1", + "coredis", "crontab>=1.0.5", "typer>=0.16.1", "uvloop>=0.21.0; sys_platform != 
'win32'", @@ -98,7 +99,7 @@ dev = [ "ruff>=0.12.10", "sphinx>=8.1.3", "sphinx-immaterial>=0.13.6", - "testcontainers[redis]>=4.12.0", + "trio>=0.30.0", ] [tool.pytest.ini_options] @@ -106,9 +107,24 @@ testpaths = "tests" [tool.uv.sources] arq = { git = "https://github.com/graeme22/arq" } +coredis = { git = "https://github.com/Graeme22/coredis.git", rev = "anyio" } [tool.ruff.lint] select = ["E", "F", "I"] [tool.pyright] strict = ["streaq/"] + +[tool.coverage.run] +branch = true +parallel = true +source = ["streaq"] +concurrency = ["multiprocessing", "thread"] +patch = ["subprocess"] + +[tool.coverage.report] +show_missing = true +fail_under = 95 + +[tool.coverage.paths] +streaq = ["streaq", "/app/streaq"] diff --git a/streaq/__init__.py b/streaq/__init__.py index 683dead..04e6759 100644 --- a/streaq/__init__.py +++ b/streaq/__init__.py @@ -2,7 +2,7 @@ import coredis -VERSION = "5.2.2" +VERSION = "6.0.0" __version__ = VERSION logger = logging.getLogger(__name__) diff --git a/streaq/__main__.py b/streaq/__main__.py index 98dcca0..effce86 100644 --- a/streaq/__main__.py +++ b/streaq/__main__.py @@ -1,4 +1,4 @@ from .cli import cli -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover cli() diff --git a/streaq/lua/create_groups.lua b/streaq/lua/create_groups.lua deleted file mode 100644 index 0212e4a..0000000 --- a/streaq/lua/create_groups.lua +++ /dev/null @@ -1,11 +0,0 @@ -local stream_key = KEYS[1] -local group_name = KEYS[2] - -for i=1, #ARGV do - local stream = stream_key .. 
ARGV[i] - -- create group if it doesn't exist - local ok, groups = pcall(redis.call, 'xinfo', 'groups', stream) - if not ok or #groups == 0 then - redis.call('xgroup', 'create', stream, group_name, '0', 'mkstream') - end -end diff --git a/streaq/lua/fail_dependents.lua b/streaq/lua/fail_dependents.lua deleted file mode 100644 index cb63fc3..0000000 --- a/streaq/lua/fail_dependents.lua +++ /dev/null @@ -1,31 +0,0 @@ -local dependents_key = KEYS[1] -local dependencies_key = KEYS[2] -local task_id = KEYS[3] - -local visited = {} -local failed = {} -local stack = { task_id } - --- iterative DFS to traverse DAG -while #stack > 0 do - -- pop off last element - local tid = stack[#stack] - stack[#stack] = nil - if not visited[tid] then - visited[tid] = true - -- push dependents onto the stack - local deps = redis.call('smembers', dependents_key .. tid) - for _, dep_id in ipairs(deps) do - stack[#stack + 1] = dep_id - redis.call('srem', dependencies_key .. dep_id, tid) - end - -- remove dependents set - redis.call('del', dependents_key .. tid) - -- add to failed list - if tid ~= task_id then - failed[#failed + 1] = tid - end - end -end - -return failed diff --git a/streaq/lua/publish_delayed_tasks.lua b/streaq/lua/publish_delayed_tasks.lua deleted file mode 100644 index 3630eaf..0000000 --- a/streaq/lua/publish_delayed_tasks.lua +++ /dev/null @@ -1,20 +0,0 @@ -local queue_key = KEYS[1] -local stream_key = KEYS[2] - -local current_time = ARGV[1] - -for i=2, #ARGV do - local priority = ARGV[i] - local queue = queue_key .. priority - -- get and delete tasks ready to run from delayed queue - local tids = redis.call('zrange', queue, 0, current_time, 'byscore') - if #tids > 0 then - redis.call('zremrangebyscore', queue, 0, current_time) - - local stream = stream_key .. 
priority - -- add ready tasks to live queue - for j=1, #tids do - redis.call('xadd', stream, '*', 'task_id', tids[j]) - end - end -end diff --git a/streaq/lua/publish_task.lua b/streaq/lua/publish_task.lua deleted file mode 100644 index 5b4ef46..0000000 --- a/streaq/lua/publish_task.lua +++ /dev/null @@ -1,46 +0,0 @@ -local stream_key = KEYS[1] -local queue_key = KEYS[2] -local task_key = KEYS[3] -local dependents_key = KEYS[4] -local dependencies_key = KEYS[5] -local results_key = KEYS[6] - -local task_id = ARGV[1] -local task_data = ARGV[2] -local priority = ARGV[3] -local score = ARGV[4] -local expire = ARGV[5] - -local args -if expire ~= '0' then - args = {'set', task_key, task_data, 'nx', 'px', expire} -else - args = {'set', task_key, task_data, 'nx'} -end - -if not redis.call(unpack(args)) then return 0 end - -local modified = 0 --- additional args are dependencies for task -for i=6, #ARGV do - local dep_id = ARGV[i] - -- update dependency DAG if dependency exists - if redis.call('exists', results_key .. dep_id) ~= 1 then - modified = modified + 1 - redis.call('sadd', dependencies_key .. task_id, dep_id) - redis.call('sadd', dependents_key .. dep_id, task_id) - end -end - --- if there are dependencies don't queue yet -if modified == 0 then - -- delayed queue - if score ~= '0' then - redis.call('zadd', queue_key .. priority, score, task_id) - -- live queue - else - return redis.call('xadd', stream_key .. priority, '*', 'task_id', task_id) - end -end - -return 1 diff --git a/streaq/lua/read_streams.lua b/streaq/lua/read_streams.lua deleted file mode 100644 index e71ae65..0000000 --- a/streaq/lua/read_streams.lua +++ /dev/null @@ -1,39 +0,0 @@ -local stream_key = KEYS[1] -local group_name = KEYS[2] -local consumer_name = KEYS[3] - -local count = tonumber(ARGV[1]) -local idle = ARGV[2] - -local entries = {} - --- additional arguments are the names of custom priorities -for i = 3, #ARGV do - local stream = stream_key .. 
ARGV[i] - local entry_table = {} - -- first, check for idle messages to reclaim - local reclaimed = redis.call('xautoclaim', stream, group_name, consumer_name, idle, '0-0', 'count', count)[2] - -- output format should match XREADGROUP - if #reclaimed > 0 then - for j=1, #reclaimed do entry_table[j] = reclaimed[j] end - count = count - #reclaimed - end - -- next, check for new messages - if count > 0 then - local res = redis.call('xreadgroup', 'group', group_name, consumer_name, 'count', count, 'streams', stream, '>') - local read = res and res[1][2] - if read then - -- this is the table we just created - local len = #entry_table - for j = 1, #read do entry_table[len + j] = read[j] end - count = count - #read - end - end - - if #entry_table > 0 then - table.insert(entries, {stream, entry_table}) - end - if count <= 0 then break end -end - -return entries diff --git a/streaq/lua/streaq.lua b/streaq/lua/streaq.lua new file mode 100644 index 0000000..1efa253 --- /dev/null +++ b/streaq/lua/streaq.lua @@ -0,0 +1,185 @@ +#!lua name=streaq + +redis.register_function('create_groups', function(keys, argv) + local stream_key = keys[1] + local group_name = keys[2] + + for i=1, #argv do + local stream = stream_key .. argv[i] + -- create group if it doesn't exist + local ok, groups = pcall(redis.call, 'xinfo', 'groups', stream) + if not ok or #groups == 0 then + redis.call('xgroup', 'create', stream, group_name, '0', 'mkstream') + end + end +end) + +redis.register_function('fail_dependents', function(keys, argv) + local dependents_key = keys[1] + local dependencies_key = keys[2] + local task_id = keys[3] + + local visited = {} + local failed = {} + local stack = { task_id } + + -- iterative DFS to traverse DAG + while #stack > 0 do + -- pop off last element + local tid = stack[#stack] + stack[#stack] = nil + if not visited[tid] then + visited[tid] = true + -- push dependents onto the stack + local deps = redis.call('smembers', dependents_key .. 
tid) + for _, dep_id in ipairs(deps) do + stack[#stack + 1] = dep_id + redis.call('srem', dependencies_key .. dep_id, tid) + end + -- remove dependents set + redis.call('del', dependents_key .. tid) + -- add to failed list + if tid ~= task_id then + failed[#failed + 1] = tid + end + end + end + + return failed +end) + +redis.register_function('publish_delayed_tasks', function(keys, argv) + local queue_key = keys[1] + local stream_key = keys[2] + + local current_time = argv[1] + + for i=2, #argv do + local priority = argv[i] + local queue = queue_key .. priority + -- get and delete tasks ready to run from delayed queue + local tids = redis.call('zrange', queue, 0, current_time, 'byscore') + if #tids > 0 then + redis.call('zremrangebyscore', queue, 0, current_time) + + local stream = stream_key .. priority + -- add ready tasks to live queue + for j=1, #tids do + redis.call('xadd', stream, '*', 'task_id', tids[j]) + end + end + end +end) + +redis.register_function('publish_task', function(keys, argv) + local stream_key = keys[1] + local queue_key = keys[2] + local task_key = keys[3] + local dependents_key = keys[4] + local dependencies_key = keys[5] + local results_key = keys[6] + + local task_id = argv[1] + local task_data = argv[2] + local priority = argv[3] + local score = argv[4] + local expire = argv[5] + + local args + if expire ~= '0' then + args = {'set', task_key, task_data, 'nx', 'px', expire} + else + args = {'set', task_key, task_data, 'nx'} + end + + if not redis.call(unpack(args)) then return 0 end + + local modified = 0 + -- additional args are dependencies for task + for i=6, #argv do + local dep_id = argv[i] + -- update dependency DAG if dependency exists + if redis.call('exists', results_key .. dep_id) ~= 1 then + modified = modified + 1 + redis.call('sadd', dependencies_key .. task_id, dep_id) + redis.call('sadd', dependents_key .. 
dep_id, task_id) + end + end + + -- if there are dependencies don't queue yet + if modified == 0 then + -- delayed queue + if score ~= '0' then + redis.call('zadd', queue_key .. priority, score, task_id) + -- live queue + else + return redis.call('xadd', stream_key .. priority, '*', 'task_id', task_id) + end + end + + return 1 +end) + +redis.register_function('read_streams', function(keys, argv) + local stream_key = keys[1] + local group_name = keys[2] + local consumer_name = keys[3] + + local count = tonumber(argv[1]) + local idle = argv[2] + + local entries = {} + + -- additional arguments are the names of custom priorities + for i = 3, #argv do + local stream = stream_key .. argv[i] + local entry_table = {} + -- first, check for idle messages to reclaim + local reclaimed = redis.call('xautoclaim', stream, group_name, consumer_name, idle, '0-0', 'count', count)[2] + -- output format should match XREADGROUP + if #reclaimed > 0 then + for j=1, #reclaimed do entry_table[j] = reclaimed[j] end + count = count - #reclaimed + end + -- next, check for new messages + if count > 0 then + local res = redis.call('xreadgroup', 'group', group_name, consumer_name, 'count', count, 'streams', stream, '>') + local read = res and res[1][2] + if read then + -- this is the table we just created + local len = #entry_table + for j = 1, #read do entry_table[len + j] = read[j] end + count = count - #read + end + end + + if #entry_table > 0 then + table.insert(entries, {stream, entry_table}) + end + if count <= 0 then break end + end + + return entries +end) + +redis.register_function('update_dependents', function(keys, argv) + local dependents_key = keys[1] + local dependencies_key = keys[2] + local task_id = keys[3] + + local runnable = {} + + local deps = redis.call('smembers', dependents_key .. task_id) + for i = 1, #deps do + local dep = deps[i] + redis.call('srem', dependencies_key .. dep, task_id) + -- if no more dependencies are left, it's time to enqueue! 
+ if redis.call('scard', dependencies_key .. dep) == 0 then + table.insert(runnable, dep) + end + end + + redis.call('del', dependents_key .. task_id, dependencies_key .. task_id) + + return runnable +end) diff --git a/streaq/lua/update_dependents.lua b/streaq/lua/update_dependents.lua deleted file mode 100644 index 66b46b8..0000000 --- a/streaq/lua/update_dependents.lua +++ /dev/null @@ -1,19 +0,0 @@ -local dependents_key = KEYS[1] -local dependencies_key = KEYS[2] -local task_id = KEYS[3] - -local runnable = {} - -local deps = redis.call('smembers', dependents_key .. task_id) -for i = 1, #deps do - local dep = deps[i] - redis.call('srem', dependencies_key .. dep, task_id) - -- if no more dependencies are left, it's time to enqueue! - if redis.call('scard', dependencies_key .. dep) == 0 then - table.insert(runnable, dep) - end -end - -redis.call('del', dependents_key .. task_id, dependencies_key .. task_id) - -return runnable diff --git a/streaq/task.py b/streaq/task.py index a4fb3d4..6ec6d2e 100644 --- a/streaq/task.py +++ b/streaq/task.py @@ -78,11 +78,29 @@ class TaskResult(Generic[R]): fn_name: str enqueue_time: int success: bool - result: R | Exception start_time: int finish_time: int tries: int worker_id: str + _result: R | BaseException + + @property + def result(self) -> R: + if not self.success: + raise StreaqError( + "Can't access result for a failed task, use TaskResult.exception " + "instead!" + ) + return self._result # type: ignore + + @property + def exception(self) -> BaseException: + if self.success: + raise StreaqError( + "Can't access exception for a successful task, use TaskResult.result " + "instead!" 
+ ) + return self._result # type: ignore @dataclass @@ -152,7 +170,8 @@ async def _enqueue(self) -> Task[R]: data = self.serialize(enqueue_time) _priority = self.priority or self.parent.worker.priorities[-1] expire = to_ms(self.parent.expire or 0) - if not await self.parent.worker.publish_task( + if not await self.parent.worker.redis.fcall( + "publish_task", keys=[ self.parent.worker.stream_key, self.parent.worker.queue_key, diff --git a/streaq/ui/tasks.py b/streaq/ui/tasks.py index 357e416..43fc49a 100644 --- a/streaq/ui/tasks.py +++ b/streaq/ui/tasks.py @@ -1,8 +1,6 @@ -import asyncio from datetime import datetime from typing import Annotated, Any -from async_lru import alru_cache from fastapi import ( APIRouter, Depends, @@ -20,6 +18,7 @@ from streaq import TaskStatus, Worker from streaq.constants import REDIS_RESULT, REDIS_RUNNING, REDIS_TASK from streaq.ui.deps import get_worker, templates +from streaq.utils import gather router = APIRouter() @@ -35,33 +34,31 @@ class TaskData(BaseModel): url: str -@alru_cache(ttl=1) async def _get_context( worker: Worker[Any], task_url: str, descending: bool ) -> dict[str, Any]: - pipe = await worker.redis.pipeline(transaction=False) - delayed = [ - pipe.zrange(worker.queue_key + priority, 0, -1) - for priority in worker.priorities - ] - commands = ( - pipe.xread( - {worker.stream_key + p: "0-0" for p in worker.priorities}, - count=1000, - ), - pipe.keys(worker.prefix + REDIS_RESULT + "*"), - pipe.keys(worker.prefix + REDIS_RUNNING + "*"), - pipe.keys(worker.prefix + REDIS_TASK + "*"), - ) - await pipe.execute() - _stream, _results, _running, _data = await asyncio.gather(*commands) + async with worker.redis.pipeline(transaction=False) as pipe: + delayed = [ + pipe.zrange(worker.queue_key + priority, 0, -1) + for priority in worker.priorities + ] + commands = ( + pipe.xread( + {worker.stream_key + p: "0-0" for p in worker.priorities}, + count=1000, + ), + pipe.keys(worker.prefix + REDIS_RESULT + "*"), + pipe.keys(worker.prefix 
+ REDIS_RUNNING + "*"), + pipe.keys(worker.prefix + REDIS_TASK + "*"), + ) + _stream, _results, _running, _data = await gather(*commands) stream: set[str] = ( set(t.field_values["task_id"] for v in _stream.values() for t in v) # type: ignore if _stream else set() ) queue: set[str] = set() - for r in await asyncio.gather(*delayed): + for r in await gather(*delayed): queue |= set(r) results = set(r.split(":")[-1] for r in _results) running = set(r.split(":")[-1] for r in _running) diff --git a/streaq/utils.py b/streaq/utils.py index cc0a49b..6d24d0b 100644 --- a/streaq/utils.py +++ b/streaq/utils.py @@ -3,9 +3,9 @@ from functools import partial, wraps from importlib import import_module from logging import Formatter -from typing import Any, Callable +from typing import Any, Awaitable, Callable, TypeVar, overload -from anyio.abc import CapacityLimiter +from anyio import CapacityLimiter, create_task_group from anyio.to_thread import run_sync from streaq.types import P, R, TypedCoroutine @@ -15,6 +15,10 @@ class StreaqError(Exception): pass +class StreaqCancelled(StreaqError): + pass + + class TimezoneFormatter(Formatter): def __init__( self, @@ -150,3 +154,64 @@ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: return await run_sync(call, abandon_on_cancel=True, limiter=limiter) return wrapper + + +T1 = TypeVar("T1") +T2 = TypeVar("T2") +T3 = TypeVar("T3") +T4 = TypeVar("T4") +T5 = TypeVar("T5") + + +@overload +async def gather( + awaitable1: Awaitable[T1], + awaitable2: Awaitable[T2], + /, +) -> tuple[T1, T2]: ... + + +@overload +async def gather( + awaitable1: Awaitable[T1], + awaitable2: Awaitable[T2], + awaitable3: Awaitable[T3], + /, +) -> tuple[T1, T2, T3]: ... + + +@overload +async def gather( + awaitable1: Awaitable[T1], + awaitable2: Awaitable[T2], + awaitable3: Awaitable[T3], + awaitable4: Awaitable[T4], + /, +) -> tuple[T1, T2, T3, T4]: ... 
+ + +@overload +async def gather( + awaitable1: Awaitable[T1], + awaitable2: Awaitable[T2], + awaitable3: Awaitable[T3], + awaitable4: Awaitable[T4], + awaitable5: Awaitable[T5], + /, +) -> tuple[T1, T2, T3, T4, T5]: ... + + +@overload +async def gather(*awaitables: Awaitable[T1]) -> tuple[T1, ...]: ... + + +async def gather(*awaitables: Awaitable[Any]) -> tuple[Any, ...]: + results: list[Any] = [None] * len(awaitables) + + async def runner(awaitable: Awaitable[Any], i: int) -> None: + results[i] = await awaitable + + async with create_task_group() as tg: + for i, awaitable in enumerate(awaitables): + tg.start_soon(runner, awaitable, i) + return tuple(results) diff --git a/streaq/worker.py b/streaq/worker.py index 34bc237..d6fda45 100644 --- a/streaq/worker.py +++ b/streaq/worker.py @@ -1,34 +1,39 @@ from __future__ import annotations -import asyncio import hmac import pickle import signal from collections import defaultdict -from contextlib import AbstractAsyncContextManager, asynccontextmanager +from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager from contextvars import ContextVar from datetime import datetime, timedelta, timezone, tzinfo -from pathlib import Path -from typing import Any, AsyncIterator, Callable, Generic, cast +from inspect import iscoroutinefunction +from sys import platform +from typing import Any, AsyncGenerator, Callable, Generic, Literal, cast from uuid import uuid4 from anyio import ( + TASK_STATUS_IGNORED, + AsyncContextManagerMixin, CancelScope, CapacityLimiter, + Path, create_memory_object_stream, create_task_group, fail_after, + get_cancelled_exc_class, open_signal_receiver, run, sleep, ) +from anyio.abc import TaskStatus as AnyStatus from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from coredis import PureToken, Redis -from coredis.commands import Script from coredis.response._callbacks.streams import MultiStreamRangeCallback from coredis.sentinel import Sentinel 
from coredis.typing import KeyT from crontab import CronTab +from typing_extensions import Self from streaq import logger from streaq.constants import ( @@ -73,9 +78,11 @@ TaskDefinition, ) from streaq.utils import ( + StreaqCancelled, StreaqError, asyncify, datetime_ms, + gather, now_ms, to_ms, to_seconds, @@ -84,14 +91,14 @@ @asynccontextmanager -async def _lifespan() -> AsyncIterator[None]: +async def _lifespan() -> AsyncGenerator[None]: yield None async def _placeholder() -> None: ... -class Worker(Generic[C]): +class Worker(AsyncContextManagerMixin, Generic[C]): """ Worker object that fetches and executes tasks from a queue. @@ -120,12 +127,17 @@ class Worker(Generic[C]): :param idle_timeout: the amount of time to wait before re-enqueuing idle tasks (either prefetched tasks that don't run, or running tasks that become unresponsive) + :param anyio_backend: anyio backend to use, either Trio or asyncio + :param anyio_kwargs: extra arguments to pass to anyio backend + :param sentinel_nodes: list of (address, port) tuples to create sentinel from + :param sentinel_master: name of sentinel master to use + :param sentinel_kwargs: extra arguments to pass to sentinel (but not instances) """ _worker_context: C __slots__ = ( - "redis", + "_redis", "concurrency", "queue_name", "_group_name", @@ -142,6 +154,7 @@ class Worker(Generic[C]): "_running_tasks", "tz", "burst", + "trio", "_handle_signals", "_block_new_tasks", "lifespan", @@ -164,19 +177,13 @@ class Worker(Generic[C]): "signing_secret", "_task_context", "priorities", - "create_groups", - "publish_task", - "publish_delayed_tasks", - "fail_dependents", - "update_dependents", - "read_streams", + "_cancelled_class", + "_initialized", ) def __init__( self, redis_url: str = "redis://localhost:6379", - redis_sentinel_nodes: list[tuple[str, int]] | None = None, - redis_sentinel_master: str = "mymaster", redis_kwargs: dict[str, Any] | None = None, concurrency: int = 16, sync_concurrency: int | None = None, @@ -190,35 +197,30 
@@ def __init__( handle_signals: bool = True, health_crontab: str = "*/5 * * * *", signing_secret: str | None = None, - idle_timeout: timedelta | int = 300, + idle_timeout: timedelta | int = 60, + anyio_backend: Literal["asyncio", "trio"] = "asyncio", + anyio_kwargs: dict[str, Any] | None = None, + sentinel_nodes: list[tuple[str, int]] | None = None, + sentinel_master: str = "mymaster", + sentinel_kwargs: dict[str, Any] | None = None, ): # Redis connection redis_kwargs = redis_kwargs or {} if redis_kwargs.pop("decode_responses", None) is not None: logger.warning("decode_responses ignored in redis_kwargs") - if redis_sentinel_nodes: + if sentinel_nodes: self._sentinel = Sentinel( - redis_sentinel_nodes, + sentinel_nodes, decode_responses=True, + sentinel_kwargs=sentinel_kwargs, **redis_kwargs, ) - self.redis = self._sentinel.primary_for(redis_sentinel_master) + self._redis = self._sentinel.primary_for(sentinel_master) else: - self.redis = Redis.from_url( + self._sentinel = None + self._redis = Redis.from_url( redis_url, decode_responses=True, **redis_kwargs ) - # register lua scripts - root = Path(__file__).parent / "lua" - - def register(name: str) -> Script[str]: - return self.redis.register_script((root / name).read_text()) - - self.create_groups = register("create_groups.lua") - self.publish_task = register("publish_task.lua") - self.publish_delayed_tasks = register("publish_delayed_tasks.lua") - self.fail_dependents = register("fail_dependents.lua") - self.update_dependents = register("update_dependents.lua") - self.read_streams = register("read_streams.lua") # user-facing properties self.concurrency = concurrency self.queue_name = queue_name @@ -243,6 +245,11 @@ def register(name: str) -> Script[str]: self.tz = tz #: whether to shut down the worker when the queue is empty; set via CLI self.burst = False + # save anyio configuration + self.anyio_backend = anyio_backend + self.anyio_kwargs = anyio_kwargs or {} + if self.anyio_backend == "asyncio" and 
"use_uvloop" not in self.anyio_kwargs: + self.anyio_kwargs["use_uvloop"] = platform != "win32" #: list of middlewares added to the worker self.middlewares: list[Middleware] = [] self.signing_secret = signing_secret.encode() if signing_secret else None @@ -259,6 +266,7 @@ def register(name: str) -> Script[str]: self.idle_timeout = to_ms(idle_timeout) self._health_tab = CronTab(health_crontab) self._task_context: ContextVar[TaskContext] = ContextVar("_task_context") + self._initialized = False # precalculate Redis prefixes self.prefix = REDIS_PREFIX + self.queue_name self.queue_key = self.prefix + REDIS_QUEUE @@ -275,23 +283,24 @@ async def _() -> None: """ Saves Redis health in Redis. """ - pipe = await self.redis.pipeline(transaction=False) - streams = [ - pipe.xlen(self.stream_key + priority) for priority in self.priorities - ] - queues = [ - pipe.zcard(self.queue_key + priority) for priority in self.priorities - ] - infos = ( - pipe.info("Memory", "Clients"), - pipe.dbsize(), - ) - await pipe.execute() - info, keys = await asyncio.gather(*infos) + async with self.redis.pipeline(transaction=False) as pipe: + streams = [ + pipe.xlen(self.stream_key + priority) + for priority in self.priorities + ] + queues = [ + pipe.zcard(self.queue_key + priority) + for priority in self.priorities + ] + infos = ( + pipe.info("Memory", "Clients"), + pipe.dbsize(), + ) + info, keys = await gather(*infos) mem_usage = info.get("used_memory_human", "?") clients = info.get("connected_clients", "?") - queued = sum(await asyncio.gather(*streams)) - scheduled = sum(await asyncio.gather(*queues)) + queued = sum(await gather(*streams)) + scheduled = sum(await gather(*queues)) health = ( f"redis {{memory: {mem_usage}, clients: {clients}, keys: {keys}, " # type: ignore f"queued: {queued}, scheduled: {scheduled}}}" @@ -307,14 +316,27 @@ def __str__(self) -> str: counters_str = repr(counters).replace("'", "") return f"worker {self.id} {counters_str}" - async def __aenter__(self) -> Worker[C]: - 
""" - Coredis will likely require an async context manager in the future! - """ - return self + @asynccontextmanager + async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: + async with AsyncExitStack() as stack: + if self._sentinel: + await stack.enter_async_context( + self._sentinel.__asynccontextmanager__() + ) + await stack.enter_async_context(self._redis.__asynccontextmanager__()) + # register lua scripts from library + text = await (Path(__file__).parent / "lua/streaq.lua").read_text() + await self._redis.register_library("streaq", text, replace=True) + self._cancelled_class = get_cancelled_exc_class() + self._initialized = True + yield self + self._initialized = False - async def __aexit__(self, *args: Any): - pass + @property + def redis(self) -> Redis[str]: + if not self._initialized: + raise StreaqError("Worker not initialized, use the async context manager!") + return self._redis def task_context(self) -> TaskContext: """ @@ -332,7 +354,7 @@ def task_context(self) -> TaskContext: @property def context(self) -> C: """ - Worker dependencies initialized with the async context manager. + Worker dependencies initialized upon worker startup. This can only be called from within a running task or a middleware. """ if not self._running: @@ -388,7 +410,7 @@ def cron( """ def wrapped(fn: AsyncCron[R] | SyncCron[R]) -> RegisteredCron[C, R]: - if asyncio.iscoroutinefunction(fn): + if iscoroutinefunction(fn): _fn = fn else: _fn = asyncify(fn, self._limiter) @@ -442,7 +464,7 @@ def task( def wrapped( fn: AsyncTask[P, R] | SyncTask[P, R], ) -> RegisteredTask[C, P, R]: - if asyncio.iscoroutinefunction(fn): + if iscoroutinefunction(fn): _fn = fn else: _fn = asyncify(fn, self._limiter) @@ -477,24 +499,32 @@ def run_sync(self) -> None: """ Sync function to run the worker, finally closes worker connections. 
""" - run(self.run_async, backend_options={"use_uvloop": True}) + run( + self.run_async, + backend=self.anyio_backend, + backend_options=self.anyio_kwargs, + ) - async def run_async(self) -> None: + async def run_async( + self, *, task_status: AnyStatus[None] = TASK_STATUS_IGNORED + ) -> None: """ Async function to run the worker, finally closes worker connections. Groups together and runs worker tasks. """ logger.info(f"starting worker {self.id} for {len(self)} functions") start_time = now_ms() - # create consumer group if it doesn't exist - await self.create_groups( - keys=[self.stream_key, self._group_name], - args=self.priorities, # type: ignore - ) # run user-defined initialization code - async with self.lifespan as context: + async with self, self.lifespan as context: self._worker_context = context self._running = True + # create consumer group if it doesn't exist + await self.redis.fcall( + "create_groups", + keys=[self.stream_key, self._group_name], + args=self.priorities, # type: ignore + ) + task_status.started() # start tasks try: send, receive = create_memory_object_stream[StreamMessage]( @@ -532,13 +562,16 @@ async def renew_idle_timeouts(self) -> None: timeout = self.idle_timeout / 1000 * 0.9 # 10% buffer while True: await sleep(timeout) - pipe = await self.redis.pipeline(transaction=True) - for priority, tasks in self._running_tasks.items(): - if tasks: - pipe.xclaim( - self.stream_key + priority, self._group_name, self.id, 0, tasks - ) - await pipe.execute() + async with self.redis.pipeline(transaction=True) as pipe: + for priority, tasks in self._running_tasks.items(): + if tasks: + pipe.xclaim( + self.stream_key + priority, + self._group_name, + self.id, + 0, + tasks, + ) async def producer( self, @@ -565,7 +598,8 @@ async def producer( # Fetch new messages if count > 0: # non-blocking, priority ordered first - res = await self.read_streams( + res = await self.redis.fcall( + "read_streams", keys=[self.stream_key, self._group_name, self.id], 
args=[count, self.idle_timeout, *self.priorities], ) @@ -593,14 +627,13 @@ async def producer( ] ) # schedule delayed tasks - pipe = await self.redis.pipeline(transaction=False) - self.publish_delayed_tasks( - keys=[self.queue_key, self.stream_key], - args=[now_ms(), *self.priorities], - client=pipe, - ) - command = pipe.smembers(self._abort_key) - await pipe.execute() + async with self.redis.pipeline(transaction=False) as pipe: + pipe.fcall( + "publish_delayed_tasks", + keys=[self.queue_key, self.stream_key], + args=[now_ms(), *self.priorities], + ) + command = pipe.smembers(self._abort_key) aborted = await command # aborted tasks if aborted: @@ -686,23 +719,22 @@ def key(mid: str) -> str: if not silent: self.counters["failed"] += 1 stream_key = self.stream_key + msg.priority - pipe = await self.redis.pipeline(transaction=True) - pipe.delete([key(REDIS_RETRY), key(REDIS_RUNNING), key(REDIS_TASK)]) - pipe.publish(self._channel_key + task_id, raw) - pipe.srem(self._abort_key, [task_id]) - pipe.xack(stream_key, self._group_name, [msg.message_id]) - pipe.xdel(stream_key, [msg.message_id]) - if raw is not None and ttl: - pipe.set(key(REDIS_RESULT), raw, ex=ttl) - command = self.fail_dependents( - keys=[ - self.prefix + REDIS_DEPENDENTS, - self.prefix + REDIS_DEPENDENCIES, - task_id, - ], - client=pipe, - ) - await pipe.execute() + async with self.redis.pipeline(transaction=True) as pipe: + pipe.delete([key(REDIS_RETRY), key(REDIS_RUNNING), key(REDIS_TASK)]) + pipe.publish(self._channel_key + task_id, raw) + pipe.srem(self._abort_key, [task_id]) + pipe.xack(stream_key, self._group_name, [msg.message_id]) + pipe.xdel(stream_key, [msg.message_id]) + if raw is not None and ttl: + pipe.set(key(REDIS_RESULT), raw, ex=ttl) + command = pipe.fcall( + "fail_dependents", + keys=[ + self.prefix + REDIS_DEPENDENTS, + self.prefix + REDIS_DEPENDENCIES, + task_id, + ], + ) res = cast(list[str], await command) if res: await self.fail_task_dependents(res) @@ -733,73 +765,67 @@ def 
key(mid: str) -> str: return self.prefix + mid + task_id stream_key = self.stream_key + msg.priority - pipe = await self.redis.pipeline(transaction=True) - pipe.xack(stream_key, self._group_name, [msg.message_id]) - pipe.xdel(stream_key, [msg.message_id]) - to_delete: list[KeyT] = [key(REDIS_RUNNING)] - if lock_key: - to_delete.append(lock_key) - if finish: - data = { - "f": fn_name, - "et": enqueue_time, - "s": success, - "r": return_value, - "st": start_time, - "ft": finish_time, - "t": tries, - "w": self.id, - } - result = self.serialize(data) - pipe.publish(self._channel_key + task_id, result) - if not silent: + async with self.redis.pipeline(transaction=True) as pipe: + pipe.xack(stream_key, self._group_name, [msg.message_id]) + pipe.xdel(stream_key, [msg.message_id]) + to_delete: list[KeyT] = [key(REDIS_RUNNING)] + if lock_key: + to_delete.append(lock_key) + if finish: + data = { + "f": fn_name, + "et": enqueue_time, + "s": success, + "r": return_value, + "st": start_time, + "ft": finish_time, + "t": tries, + "w": self.id, + } + result = self.serialize(data) + pipe.publish(self._channel_key + task_id, result) + if not silent: + if success: + self.counters["completed"] += 1 + else: + self.counters["failed"] += 1 + if ttl != 0: + pipe.set(key(REDIS_RESULT), result, ex=ttl) + to_delete.extend([key(REDIS_RETRY), key(REDIS_TASK)]) + pipe.delete(to_delete) + pipe.srem(self._abort_key, [task_id]) if success: - self.counters["completed"] += 1 + output, truncate_length = str(return_value), 32 + if len(output) > truncate_length: + output = f"{output[:truncate_length]}…" + if not silent: + logger.info(f"task {fn_name} ■ {task_id} ← {output}") + if triggers: + args = self.serialize(to_tuple(return_value)) + pipe.set(key(REDIS_PREVIOUS), args, ex=timedelta(minutes=5)) + script = "update_dependents" else: - self.counters["failed"] += 1 - if ttl != 0: - pipe.set(key(REDIS_RESULT), result, ex=ttl) - to_delete.extend([key(REDIS_RETRY), key(REDIS_TASK)]) - 
pipe.delete(to_delete) - pipe.srem(self._abort_key, [task_id]) - if success: - output, truncate_length = str(return_value), 32 - if len(output) > truncate_length: - output = f"{output[:truncate_length]}…" - if not silent: - logger.info(f"task {fn_name} ■ {task_id} ← {output}") - if triggers: - args = self.serialize(to_tuple(return_value)) - pipe.set(key(REDIS_PREVIOUS), args, ex=timedelta(minutes=5)) - script = self.update_dependents + script = "fail_dependents" + command = pipe.fcall( + script, + keys=[ + self.prefix + REDIS_DEPENDENTS, + self.prefix + REDIS_DEPENDENCIES, + task_id, + ], + ) else: - script = self.fail_dependents - command = script( - keys=[ - self.prefix + REDIS_DEPENDENTS, - self.prefix + REDIS_DEPENDENCIES, - task_id, - ], - client=pipe, - ) - elif schedule: - if not silent: - self.counters["retried"] += 1 - pipe.delete(to_delete) - pipe.zadd(self.queue_key + msg.priority, {task_id: schedule}) - else: - if not silent: - self.counters["retried"] += 1 - pipe.delete(to_delete) - pipe.xadd(stream_key, {"task_id": task_id}) - await pipe.execute() + assert schedule is not None # this shouldn't be possible + if not silent: + self.counters["retried"] += 1 + pipe.delete(to_delete) + pipe.zadd(self.queue_key + msg.priority, {task_id: schedule}) if finish and (res := cast(list[str], await command)): # type: ignore if success: - pipe = await self.redis.pipeline(transaction=False) - for dep_id in res: - logger.info(f"↳ dependent {dep_id} triggered") - pipe.xadd(stream_key, {"task_id": dep_id}) - await pipe.execute() + async with self.redis.pipeline(transaction=False) as pipe: + for dep_id in res: + logger.info(f"↳ dependent {dep_id} triggered") + pipe.xadd(stream_key, {"task_id": dep_id}) else: await self.fail_task_dependents(res) @@ -824,22 +850,21 @@ async def prepare_task( def key(mid: str) -> str: return self.prefix + mid + task_id - pipe = await self.redis.pipeline(transaction=True) - commands = ( - pipe.get(key(REDIS_TASK)), - 
pipe.incr(key(REDIS_RETRY)), - pipe.srem(self._abort_key, [task_id]), - pipe.xclaim( - self.stream_key + msg.priority, - self._group_name, - self.id, - 0, - [msg.message_id], - justid=True, - ), - ) - await pipe.execute() - raw, task_try, abort, active = await asyncio.gather(*commands) + async with self.redis.pipeline(transaction=True) as pipe: + commands = ( + pipe.get(key(REDIS_TASK)), + pipe.incr(key(REDIS_RETRY)), + pipe.srem(self._abort_key, [task_id]), + pipe.xclaim( + self.stream_key + msg.priority, + self._group_name, + self.id, + 0, + [msg.message_id], + justid=True, + ), + ) + raw, task_try, abort, active = await gather(*commands) if not raw: logger.warning(f"task † {task_id} expired") return await self.finish_failed_task( @@ -872,7 +897,7 @@ def key(mid: str) -> str: logger.info(f"task {fn_name} ⊘ {task_id} aborted prior to run") return await self.finish_failed_task( msg, - asyncio.CancelledError("Task aborted prior to run!"), + StreaqCancelled("Task aborted prior to run!"), task_try, enqueue_time=data["t"], fn_name=fn_name, @@ -899,18 +924,17 @@ def key(mid: str) -> str: None if task.timeout is None else start_time + 1000 + to_ms(task.timeout) ) after = data.get("A") - pipe = await self.redis.pipeline(transaction=True) - if task.unique: - lock_key = self.prefix + REDIS_UNIQUE + fn_name - locked = pipe.set( - lock_key, task_id, get=True, condition=PureToken.NX, pxat=timeout - ) - else: - lock_key = None - pipe.set(key(REDIS_RUNNING), 1, pxat=timeout) - if after: - previous = pipe.get(self.prefix + REDIS_PREVIOUS + after) - await pipe.execute() + async with self.redis.pipeline(transaction=True) as pipe: + if task.unique: + lock_key = self.prefix + REDIS_UNIQUE + fn_name + locked = pipe.set( + lock_key, task_id, get=True, condition=PureToken.NX, pxat=timeout + ) + else: + lock_key = None + pipe.set(key(REDIS_RUNNING), 1, pxat=timeout) + if after: + previous = pipe.get(self.prefix + REDIS_PREVIOUS + after) if task.unique: existing = cast(str | None, await 
locked) # type: ignore # allow retries of the same task but not new ones @@ -940,10 +964,7 @@ async def run_task(self, msg: StreamMessage) -> None: """ Execute the registered task, then store the result in Redis. """ - res = None - with CancelScope(shield=True): - res = await self.prepare_task(msg) - if not res: + if not (res := await self.prepare_task(msg)): return task, data, task_try, _args, lock_key = res @@ -969,13 +990,13 @@ async def _fn(*args: Any, **kwargs: Any) -> Any: result: Any = None try: if self._block_new_tasks: - raise asyncio.CancelledError("Not running task, worker shut down!") + raise StreaqCancelled("Not running task, worker shut down!") with CancelScope() as scope: self._cancel_scopes[task_id] = scope self._running_tasks[msg.priority].add(msg.message_id) result = await wrapped(*_args, **data["k"]) if scope.cancelled_caught: - result = asyncio.CancelledError("Task aborted by user!") + result = StreaqCancelled("Task aborted by user!") success = False done = True if not task.silent: @@ -997,14 +1018,16 @@ async def _fn(*args: Any, **kwargs: Any) -> Any: schedule = now_ms() + delay if not task.silent: logger.exception(f"Retrying task {task_id}!") - logger.info(f"task {task.fn_name} ↻ {task_id} retrying in {delay}s") + logger.info( + f"task {task.fn_name} ↻ {task_id} retrying in {delay}ms" + ) except TimeoutError as e: if not task.silent: logger.error(f"task {task.fn_name} … {task_id} timed out") result = e success = False done = True - except asyncio.CancelledError: + except self._cancelled_class: if not task.silent: logger.info( f"task {task.fn_name} ↻ {task_id} cancelled, will be retried" @@ -1020,27 +1043,26 @@ async def _fn(*args: Any, **kwargs: Any) -> Any: logger.exception(f"Task {task_id} failed!") logger.info(f"task {task.fn_name} × {task_id} failed") finally: - with CancelScope(shield=True): - finish_time = now_ms() - self._cancel_scopes.pop(task_id, None) - self._running_tasks[msg.priority].remove(msg.message_id) - await 
self.finish_task( - msg, - finish=done, - schedule=schedule, - return_value=result, - start_time=start_time, - finish_time=finish_time or now_ms(), - enqueue_time=data["t"], - fn_name=data["f"], - success=success, - silent=task.silent, - ttl=task.ttl, - triggers=data.get("T"), - lock_key=lock_key, - tries=task_try, - ) - self._task_context.reset(token) + finish_time = now_ms() + self._cancel_scopes.pop(task_id, None) + self._running_tasks[msg.priority].remove(msg.message_id) + await self.finish_task( + msg, + finish=done, + schedule=schedule, + return_value=result, + start_time=start_time, + finish_time=finish_time or now_ms(), + enqueue_time=data["t"], + fn_name=data["f"], + success=success, + silent=task.silent, + ttl=task.ttl, + triggers=data.get("T"), + lock_key=lock_key, + tries=task_try, + ) + self._task_context.reset(token) async def fail_task_dependents(self, dependents: list[str]) -> None: """ @@ -1058,16 +1080,15 @@ async def fail_task_dependents(self, dependents: list[str]) -> None: "w": self.id, } result = self.serialize(failure) - pipe = await self.redis.pipeline(transaction=False) self.counters["failed"] += len(dependents) to_delete: list[KeyT] = [] - for dep_id in dependents: - logger.info(f"task dependent × {dep_id} failed") - to_delete.append(self.prefix + REDIS_TASK + dep_id) - pipe.set(self.results_key + dep_id, result, ex=300) - pipe.publish(self._channel_key + dep_id, result) - pipe.delete(to_delete) - await pipe.execute() + async with self.redis.pipeline(transaction=False) as pipe: + for dep_id in dependents: + logger.info(f"task dependent × {dep_id} failed") + to_delete.append(self.prefix + REDIS_TASK + dep_id) + pipe.set(self.results_key + dep_id, result, ex=300) + pipe.publish(self._channel_key + dep_id, result) + pipe.delete(to_delete) def enqueue_unsafe( self, @@ -1113,48 +1134,45 @@ async def enqueue_many(self, tasks: list[Task[Any]]) -> None: # importantly, we're not using `await` here tasks = [foobar.enqueue(i) for i in range(10)] - 
async with worker: - await worker.enqueue_many(tasks) + await worker.enqueue_many(tasks) """ enqueue_time = now_ms() - pipe = await self.redis.pipeline(transaction=False) - for task in tasks: - if task._after: # type: ignore - task.after.append(task._after.id) # type: ignore - if task.schedule: - score = datetime_ms(task.schedule) - elif task.delay is not None: - score = enqueue_time + to_ms(task.delay) - else: - score = 0 - data = task.serialize(enqueue_time) - _priority = task.priority or self.priorities[-1] - expire = to_ms(task.parent.expire or 0) - self.publish_task( - keys=[ - self.stream_key, - self.queue_key, - task.task_key(REDIS_TASK), - self.dependents_key, - self.dependencies_key, - self.results_key, - ], - args=[task.id, data, _priority, score, expire] + task.after, - client=pipe, - ) - await pipe.execute() + async with self.redis.pipeline(transaction=False) as pipe: + for task in tasks: + if task._after: # type: ignore + task.after.append(task._after.id) # type: ignore + if task.schedule: + score = datetime_ms(task.schedule) + elif task.delay is not None: + score = enqueue_time + to_ms(task.delay) + else: + score = 0 + data = task.serialize(enqueue_time) + _priority = task.priority or self.priorities[-1] + expire = to_ms(task.parent.expire or 0) + pipe.fcall( + "publish_task", + keys=[ + self.stream_key, + self.queue_key, + task.task_key(REDIS_TASK), + self.dependents_key, + self.dependencies_key, + self.results_key, + ], + args=[task.id, data, _priority, score, expire] + task.after, + ) async def queue_size(self) -> int: """ Returns the number of tasks currently queued in Redis. 
""" - pipe = await self.redis.pipeline(transaction=True) - commands = [ - pipe.xlen(self.stream_key + priority) for priority in self.priorities - ] + [pipe.zcard(self.queue_key + priority) for priority in self.priorities] - await pipe.execute() - return sum(await asyncio.gather(*commands)) + async with self.redis.pipeline(transaction=True) as pipe: + commands = [ + pipe.xlen(self.stream_key + priority) for priority in self.priorities + ] + [pipe.zcard(self.queue_key + priority) for priority in self.priorities] + return sum(await gather(*commands)) @property def active(self) -> int: @@ -1201,25 +1219,24 @@ async def status_by_id(self, task_id: str) -> TaskStatus: def key(mid: str) -> str: return self.prefix + mid + task_id - pipe = await self.redis.pipeline(transaction=True) - delayed = [ - pipe.zscore(self.queue_key + priority, task_id) - for priority in self.priorities - ] - commands = ( - pipe.exists([key(REDIS_RESULT)]), - pipe.exists([key(REDIS_RUNNING)]), - pipe.exists([key(REDIS_TASK)]), - pipe.exists([key(REDIS_DEPENDENCIES)]), - ) - await pipe.execute() - done, running, data, dependencies = await asyncio.gather(*commands) + async with self.redis.pipeline(transaction=True) as pipe: + delayed = [ + pipe.zscore(self.queue_key + priority, task_id) + for priority in self.priorities + ] + commands = ( + pipe.exists([key(REDIS_RESULT)]), + pipe.exists([key(REDIS_RUNNING)]), + pipe.exists([key(REDIS_TASK)]), + pipe.exists([key(REDIS_DEPENDENCIES)]), + ) + done, running, data, dependencies = await gather(*commands) if done: return TaskStatus.DONE elif running: return TaskStatus.RUNNING - score = any(r for r in await asyncio.gather(*delayed)) + score = any(r for r in await gather(*delayed)) if score or dependencies: return TaskStatus.SCHEDULED elif data: @@ -1238,11 +1255,11 @@ async def result_by_id( :return: wrapped result object """ result_key = self.results_key + task_id - async with self.redis.pubsub( - channels=[self._channel_key + task_id], 
ignore_subscribe_messages=True - ) as pubsub: - if not (raw := await self.redis.get(result_key)): - with fail_after(to_seconds(timeout)): + with fail_after(to_seconds(timeout)): + async with self.redis.pubsub( + channels=[self._channel_key + task_id], ignore_subscribe_messages=True + ) as pubsub: + if not (raw := await self.redis.get(result_key)): msg = await pubsub.__anext__() raw = msg["data"] # type: ignore data = self.deserialize(raw) @@ -1250,11 +1267,11 @@ async def result_by_id( fn_name=data["f"], enqueue_time=data["et"], success=data["s"], - result=data["r"], start_time=data["st"], finish_time=data["ft"], tries=data["t"], worker_id=data["w"], + _result=data["r"], ) async def abort_by_id( @@ -1279,9 +1296,7 @@ async def abort_by_id( return False try: result = await self.result_by_id(task_id, timeout=timeout) - return not result.success and isinstance( - result.result, asyncio.CancelledError - ) + return not result.success and isinstance(result.exception, StreaqCancelled) except TimeoutError: return False @@ -1297,26 +1312,23 @@ async def info_by_id(self, task_id: str) -> TaskInfo | None: def key(mid: str) -> str: return self.prefix + mid + task_id - pipe = await self.redis.pipeline(transaction=False) - delayed = [ - pipe.zscore(self.queue_key + priority, task_id) - for priority in self.priorities - ] - commands = ( - pipe.get(key(REDIS_RESULT)), - pipe.get(key(REDIS_TASK)), - pipe.get(key(REDIS_RETRY)), - pipe.smembers(key(REDIS_DEPENDENCIES)), - pipe.smembers(key(REDIS_DEPENDENTS)), - ) - await pipe.execute() - result, raw, try_count, dependencies, dependents = await asyncio.gather( - *commands - ) + async with self.redis.pipeline(transaction=False) as pipe: + delayed = [ + pipe.zscore(self.queue_key + priority, task_id) + for priority in self.priorities + ] + commands = ( + pipe.get(key(REDIS_RESULT)), + pipe.get(key(REDIS_TASK)), + pipe.get(key(REDIS_RETRY)), + pipe.smembers(key(REDIS_DEPENDENCIES)), + pipe.smembers(key(REDIS_DEPENDENTS)), + ) + result, 
raw, try_count, dependencies, dependents = await gather(*commands) if result or not raw: # if result exists or task data doesn't return None data = self.deserialize(raw) - res = await asyncio.gather(*delayed) + res = await gather(*delayed) score = next((r for r in res if r), None) dt = datetime.fromtimestamp(score / 1000, tz=self.tz) if score else None return TaskInfo( diff --git a/tests/conftest.py b/tests/conftest.py index 260031e..9bc3883 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,31 +1,37 @@ -from typing import Any, Generator +from typing import Literal from uuid import uuid4 from pytest import fixture -from testcontainers.redis import RedisContainer from streaq import Worker -@fixture(scope="module") -def anyio_backend() -> str: - return "asyncio" - - @fixture(scope="session") -def redis_container() -> Generator[RedisContainer, Any, None]: - with RedisContainer() as container: - yield container - container.get_client().flushdb() +def redis_url() -> str: + return "redis://redis-master:6379" -@fixture(scope="session") -def redis_url(redis_container: RedisContainer) -> Generator[str, None, None]: - host = redis_container.get_container_host_ip() - port = redis_container.get_exposed_port(redis_container.port) - yield f"redis://{host}:{port}" +@fixture(scope="function") +def sentinel_worker(anyio_backend: Literal["asyncio", "trio"]) -> Worker: + return Worker( + sentinel_nodes=[ + ("sentinel-1", 26379), + ("sentinel-2", 26379), + ("sentinel-3", 26379), + ], + sentinel_master="mymaster", + queue_name=uuid4().hex, + anyio_backend=anyio_backend, + ) @fixture(scope="function") -def worker(redis_url: str) -> Worker: - return Worker(redis_url=redis_url, queue_name=uuid4().hex) +def normal_worker(anyio_backend: Literal["asyncio", "trio"], redis_url: str) -> Worker: + return Worker( + redis_url=redis_url, queue_name=uuid4().hex, anyio_backend=anyio_backend + ) + + +@fixture(params=["direct", "sentinel"], ids=["redis", "sentinel"]) +def worker(request, 
normal_worker: Worker, sentinel_worker: Worker) -> Worker: + return normal_worker if request.param == "direct" else sentinel_worker diff --git a/tests/failure.py b/tests/failure.py index cc770ac..3b8ad00 100644 --- a/tests/failure.py +++ b/tests/failure.py @@ -1,22 +1,27 @@ import sys import pytest -from anyio import run, sleep +from anyio import create_task_group, run, sleep from streaq import Worker @pytest.mark.anyio -async def test_reclaim_idle_task(redis_url: str): +async def test_reclaim_idle_task(redis_url: str, task_id: str): worker1 = Worker(redis_url=redis_url, queue_name="reclaim", idle_timeout=3) - @worker1.task() + @worker1.task(name="foo") async def foo() -> None: await sleep(2) - await worker1.run_async() + async with create_task_group() as tg: + await tg.start(worker1.run_async) + task = foo.enqueue() + task.id = task_id + await task if __name__ == "__main__": redis_url = sys.argv[1] - run(test_reclaim_idle_task, redis_url) + task_id = sys.argv[2] + run(test_reclaim_idle_task, redis_url, task_id) diff --git a/tests/test_cli.py b/tests/test_cli.py index c5adac3..411ef88 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -86,11 +86,11 @@ def run_subprocess(): return e async def modify_file(): - await asyncio.sleep(1) # wait for startup + await sleep(1) # wait for startup with open(worker_file, "a") as f: f.write(" # change from test") - res, _ = await asyncio.gather(asyncio.to_thread(run_subprocess), modify_file()) + res, _ = await gather(run_sync(run_subprocess), modify_file()) assert str(res.value.stderr).count("starting") > 1 @@ -100,8 +100,10 @@ def find_free_port() -> int: # Finds and returns an available TCP port. 
return s.getsockname()[1] +@pytest.mark.xdist_group(name="web") async def test_web_cli(worker_file: str): file_name = worker_file.split("/")[-1][:-3] + port = find_free_port() def run_subprocess(): with pytest.raises(subprocess.TimeoutExpired) as e: @@ -113,7 +115,7 @@ def run_subprocess(): f"{file_name}.worker", "--web", "--port", - str(find_free_port()), + str(port), ], capture_output=True, text=True, @@ -123,10 +125,10 @@ def run_subprocess(): return e async def modify_file(): - await asyncio.sleep(1) # wait for startup - return httpx.get("http://localhost:8000/") + await sleep(1) # wait for startup + return httpx.get(f"http://localhost:{port}/") - web, res = await asyncio.gather(asyncio.to_thread(run_subprocess), modify_file()) + web, res = await gather(run_sync(run_subprocess), modify_file()) assert "Uvicorn" in str(web.value.stderr) assert res.status_code == 303 """ diff --git a/tests/test_task.py b/tests/test_task.py index f3acee4..f45b2fd 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -1,16 +1,16 @@ -import asyncio import time from datetime import datetime, timedelta from typing import Any from uuid import uuid4 import pytest -from anyio import create_task_group +from anyio import create_task_group, sleep from streaq import StreaqError, Worker from streaq.constants import REDIS_UNIQUE from streaq.task import StreaqRetry, TaskStatus from streaq.types import ReturnCoroutine +from streaq.utils import gather pytestmark = pytest.mark.anyio @@ -18,11 +18,11 @@ async def test_result_timeout(worker: Worker): @worker.task() async def foobar() -> None: - await asyncio.sleep(5) + await sleep(5) - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() with pytest.raises(TimeoutError): await task.result(3) tg.cancel_scope.cancel() @@ -37,50 +37,41 @@ async def foobar() -> bool: async def test_task_timeout(worker: Worker): - 
@worker.task(timeout=1) + @worker.task(timeout=timedelta(seconds=1)) async def foobar() -> None: - await asyncio.sleep(5) + await sleep(5) - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert not res.success - assert isinstance(res.result, TimeoutError) + assert isinstance(res.exception, TimeoutError) tg.cancel_scope.cancel() async def test_task_status(worker: Worker): @worker.task() async def foobar() -> None: - await asyncio.sleep(1) + await sleep(2) + + async with worker: + task = foobar.enqueue() + assert await task.status() == TaskStatus.NOT_FOUND + await task.start() + assert await task.status() == TaskStatus.QUEUED + task2 = await foobar.enqueue().start(delay=5) - task = foobar.enqueue() - assert await task.status() == TaskStatus.NOT_FOUND - await task.start() - assert await task.status() == TaskStatus.QUEUED - task2 = await foobar.enqueue().start(delay=5) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + await sleep(1) + assert await task.status() == TaskStatus.RUNNING await task.result(3) assert await task.status() == TaskStatus.DONE assert await task2.status() == TaskStatus.SCHEDULED tg.cancel_scope.cancel() -async def test_task_status_running(worker: Worker): - @worker.task() - async def foobar() -> None: - await asyncio.sleep(3) - - task = await foobar.enqueue().start() - async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) - assert await task.status() == TaskStatus.RUNNING - tg.cancel_scope.cancel() - - async def test_task_cron(worker: Worker): @worker.cron("30 9 1 1 *") async def cron1() -> bool: @@ -88,14 +79,14 @@ async def cron1() -> bool: @worker.cron("* * * * * * *") # once/second async def cron2() -> None: - await asyncio.sleep(3) + await sleep(3) schedule = cron1.schedule() assert schedule.day 
== 1 and schedule.month == 1 assert await cron1.run() async with create_task_group() as tg: tg.start_soon(worker.run_async) - await asyncio.sleep(2) + await sleep(2) # this will be set if task is running assert await worker.redis.get(worker.prefix + REDIS_UNIQUE + cron2.fn_name) tg.cancel_scope.cancel() @@ -106,12 +97,13 @@ async def test_task_info(worker: Worker): async def foobar() -> None: pass - task = await foobar.enqueue().start(delay=5) - task2 = await foobar.enqueue() - info = await task.info() - info2 = await task2.info() - assert info and info.scheduled is not None - assert info2 and info2.scheduled is None + async with worker: + task = await foobar.enqueue().start(delay=5) + task2 = await foobar.enqueue() + info = await task.info() + info2 = await task2.info() + assert info and info.scheduled is not None + assert info2 and info2.scheduled is None async def test_task_retry(worker: Worker): @@ -122,10 +114,10 @@ async def foobar() -> int: raise StreaqRetry("Retrying!") return ctx.tries - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - res = await task.result(7) + await tg.start(worker.run_async) + task = await foobar.enqueue() + res = await task.result(10) assert res.success assert res.result == 3 tg.cancel_scope.cancel() @@ -139,12 +131,10 @@ async def foobar() -> int: raise StreaqRetry("Retrying!", delay=timedelta(seconds=3)) return ctx.tries - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - with pytest.raises(TimeoutError): - await task.result(3) - res = await task.result(1) + await tg.start(worker.run_async) + task = await foobar.enqueue() + res = await task.result(5) assert res is not None assert res.success assert res.result == 2 @@ -157,16 +147,14 @@ async def foobar() -> int: ctx = worker.task_context() if ctx.tries == 1: raise StreaqRetry( - "Retrying!", schedule=datetime.now() + timedelta(seconds=3) + "Retrying!", 
schedule=datetime.now() + timedelta(seconds=2) ) return ctx.tries - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - with pytest.raises(TimeoutError): - await task.result(3) - res = await task.result(1) + await tg.start(worker.run_async) + task = await foobar.enqueue() + res = await task.result(6) assert res is not None assert res.success assert res.result == 2 @@ -178,12 +166,14 @@ async def test_task_failure(worker: Worker): async def foobar() -> None: raise Exception("That wasn't supposed to happen!") - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert not res.success - assert isinstance(res.result, Exception) + assert isinstance(res.exception, Exception) + with pytest.raises(StreaqError): + _ = res.result tg.cancel_scope.cancel() @@ -194,9 +184,9 @@ async def foobar() -> bool: raise StreaqRetry("Retrying!", delay=0) return True - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert res is not None assert res.success @@ -209,13 +199,13 @@ async def test_task_max_retries(worker: Worker): async def foobar() -> None: raise StreaqRetry("Retrying!", delay=0) - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert res is not None assert not res.success - assert isinstance(res.result, StreaqError) + assert isinstance(res.exception, StreaqError) tg.cancel_scope.cancel() @@ -225,12 +215,14 @@ async def foobar() -> bool: return True worker.burst = True - task = await foobar.enqueue() - await worker.run_async() - result = await task.result(3) - assert result.success - assert 
result.result - assert not await task.abort() + async with create_task_group() as tg: + await tg.start(worker.run_async) + task = await foobar.enqueue() + result = await task.result(3) + assert result.success + assert result.result + assert not await task.abort() + tg.cancel_scope.cancel() async def test_task_nonexistent_or_finished_dependency(worker: Worker): @@ -238,9 +230,9 @@ async def test_task_nonexistent_or_finished_dependency(worker: Worker): async def foobar() -> None: pass - task = await foobar.enqueue().start(after="nonexistent") async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue().start(after="nonexistent") with pytest.raises(TimeoutError): await task.result(3) tg.cancel_scope.cancel() @@ -249,29 +241,31 @@ async def foobar() -> None: async def test_task_dependency(worker: Worker): @worker.task() async def foobar() -> None: - await asyncio.sleep(1) + await sleep(1) - task = await foobar.enqueue().start(delay=1) - task2 = await foobar.enqueue().start(after=task.id) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue().start(delay=1) + task2 = await foobar.enqueue().start(after=task.id) assert await task2.status() == TaskStatus.SCHEDULED await task.result(3) result = await task2.result(3) assert result.success + with pytest.raises(StreaqError): + _ = result.exception tg.cancel_scope.cancel() async def test_task_dependency_multiple(worker: Worker): @worker.task() async def foobar() -> None: - await asyncio.sleep(1) + await sleep(1) - task = await foobar.enqueue().start() - task2 = await foobar.enqueue().start(after=task.id) - task3 = await foobar.enqueue().start(after=[task.id, task2.id]) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue().start() + task2 = await foobar.enqueue().start(after=task.id) + 
task3 = await foobar.enqueue().start(after=[task.id, task2.id]) assert await task2.status() == TaskStatus.SCHEDULED assert await task3.status() == TaskStatus.SCHEDULED res1 = await task.result(3) @@ -293,13 +287,13 @@ async def foobar() -> None: async def do_nothing() -> None: pass - task = await foobar.enqueue().start() - dep = await do_nothing.enqueue().start(after=task.id) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue().start() + dep = await do_nothing.enqueue().start(after=task.id) res = await dep.result(3) assert not res.success - assert isinstance(res.result, StreaqError) + assert isinstance(res.exception, StreaqError) tg.cancel_scope.cancel() @@ -308,12 +302,12 @@ async def test_sync_task(worker: Worker): def foobar() -> None: time.sleep(2) - task = await foobar.enqueue() - task2 = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() + task2 = await foobar.enqueue() # this would time out if these were running sequentially - results = await asyncio.gather(task.result(3), task2.result(3)) + results = await gather(task.result(3), task2.result(3)) assert all(res.success for res in results) tg.cancel_scope.cancel() @@ -323,9 +317,9 @@ async def test_unsafe_enqueue(worker: Worker): async def foobar(ret: int) -> int: return ret - task = await worker.enqueue_unsafe(foobar.fn_name, 42) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await worker.enqueue_unsafe(foobar.fn_name, 42) res = await task.result(3) assert res.success assert res.result == 42 @@ -341,17 +335,17 @@ async def foobar() -> None: async def child() -> None: pass - task = await foobar.enqueue().start(delay=timedelta(seconds=3)) - dep1 = await child.enqueue().start(after=task.id) - dep2 = await child.enqueue().start(after=[task.id, 
dep1.id]) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) + await tg.start(worker.run_async) + task = await foobar.enqueue().start(delay=timedelta(seconds=3)) + dep1 = await child.enqueue().start(after=task.id) + dep2 = await child.enqueue().start(after=[task.id, dep1.id]) + await sleep(1) assert await task.abort(3) res1 = await dep1.result(3) res2 = await dep2.result(3) - assert not res1.success and isinstance(res1.result, StreaqError) - assert not res2.success and isinstance(res2.result, StreaqError) + assert not res1.success and isinstance(res1.exception, StreaqError) + assert not res2.success and isinstance(res2.exception, StreaqError) tg.cancel_scope.cancel() @@ -365,15 +359,17 @@ async def test_task_priorities(redis_url: str): @worker.task() async def foobar() -> None: - await asyncio.sleep(1) + await sleep(1) + + async with worker: + low = [foobar.enqueue().start(priority="low") for _ in range(4)] + high = [foobar.enqueue().start(priority="high") for _ in range(4)] + await worker.enqueue_many(low + high) - low = [foobar.enqueue().start(priority="low") for _ in range(4)] - high = [foobar.enqueue().start(priority="high") for _ in range(4)] - await worker.enqueue_many(low + high) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - results = await asyncio.gather(*[t.result(3) for t in high]) - statuses = await asyncio.gather(*[t.status() for t in low]) + await tg.start(worker.run_async) + results = await gather(*[t.result(3) for t in high]) + statuses = await gather(*[t.status() for t in low]) assert all(res.success for res in results) assert all(status != TaskStatus.DONE for status in statuses) tg.cancel_scope.cancel() @@ -385,9 +381,9 @@ async def foobar() -> None: pass dt = datetime.now() + timedelta(seconds=1) - task = await foobar.enqueue().start(schedule=dt) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await 
foobar.enqueue().start(schedule=dt) assert await task.status() == TaskStatus.SCHEDULED res = await task.result(3) assert res.success @@ -399,26 +395,29 @@ async def test_bad_start_params(worker: Worker): async def foobar() -> None: pass - with pytest.raises(StreaqError): - await foobar.enqueue().start(delay=1, schedule=datetime.now()) - with pytest.raises(StreaqError): - await foobar.enqueue().start(delay=1, after="foobar") - with pytest.raises(StreaqError): - await foobar.enqueue().start(schedule=datetime.now(), after="foobar") + async with worker: + with pytest.raises(StreaqError): + await foobar.enqueue().start(delay=1, schedule=datetime.now()) + with pytest.raises(StreaqError): + await foobar.enqueue().start(delay=1, after="foobar") + with pytest.raises(StreaqError): + await foobar.enqueue().start(schedule=datetime.now(), after="foobar") async def test_enqueue_unique_task(worker: Worker): @worker.task(unique=True) async def foobar() -> None: - await asyncio.sleep(1) + await sleep(1) - task = await foobar.enqueue() - task2 = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - results = await asyncio.gather(task.result(), task2.result()) - assert any(isinstance(r.result, StreaqError) for r in results) - assert any(r.result is None for r in results) + await tg.start(worker.run_async) + task = await foobar.enqueue() + task2 = await foobar.enqueue() + results = await gather(task.result(), task2.result()) + assert any( + not r.success and isinstance(r.exception, StreaqError) for r in results + ) + assert any(r.success and r.result is None for r in results) tg.cancel_scope.cancel() @@ -427,10 +426,10 @@ async def test_failed_abort(worker: Worker): async def foobar() -> None: pass - task = await foobar.enqueue().start() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) + await tg.start(worker.run_async) + task = await foobar.enqueue().start() + await sleep(1) assert not await 
task.abort(1) tg.cancel_scope.cancel() @@ -442,7 +441,7 @@ async def cron1() -> bool: @worker.cron("* * * * * * *", timeout=1) async def cron2() -> None: - await asyncio.sleep(3) + await sleep(3) assert await cron1.run() with pytest.raises(TimeoutError): @@ -455,8 +454,8 @@ def cronjob() -> None: time.sleep(3) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(2) + await tg.start(worker.run_async) + await sleep(2) assert await worker.redis.get(worker.prefix + REDIS_UNIQUE + cronjob.fn_name) tg.cancel_scope.cancel() @@ -470,8 +469,8 @@ async def cronjob() -> None: val += 1 async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(5) + await tg.start(worker.run_async) + await sleep(5) assert val > 1 tg.cancel_scope.cancel() @@ -489,9 +488,9 @@ async def wrapper(*args, **kwargs) -> Any: return wrapper - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert res.success assert res.result == 4 @@ -507,9 +506,9 @@ async def double(val: int) -> int: async def is_even(val: int) -> bool: return val % 2 == 0 - task = await double.enqueue(1).then(double).then(is_even) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await double.enqueue(1).then(double).then(is_even) res = await task.result(3) assert res.result and res.success tg.cancel_scope.cancel() @@ -531,11 +530,11 @@ async def foobar(): async def bar(): return 10 - task1 = await worker.enqueue_unsafe("bar") - task2 = await worker.enqueue_unsafe(bar.fn_name) - task3 = await worker.enqueue_unsafe(foo.fn.__qualname__) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task1 = await worker.enqueue_unsafe("bar") + task2 = await worker.enqueue_unsafe(bar.fn_name) + task3 = await 
worker.enqueue_unsafe(foo.fn.__qualname__) res = await task1.result(3) assert res.result == 42 res = await task2.result(3) @@ -548,7 +547,7 @@ async def bar(): async def test_cron_with_custom_name(worker: Worker): @worker.cron("* * * * * * *", name="foo") async def cronjob() -> None: - await asyncio.sleep(3) + await sleep(3) async def cronjob1() -> None: pass @@ -558,8 +557,8 @@ async def cronjob1() -> None: worker.cron("* * * * * * *", name="foo")(cronjob1) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(2) + await tg.start(worker.run_async) + await sleep(2) assert await worker.redis.get(worker.prefix + REDIS_UNIQUE + cronjob.fn_name) tg.cancel_scope.cancel() @@ -569,12 +568,12 @@ async def cronjob1() -> None: async def test_abort(worker: Worker, ttl: int, wait: int): @worker.task(ttl=ttl) async def foobar() -> None: - await asyncio.sleep(5) + await sleep(5) - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(wait) + await tg.start(worker.run_async) + task = await foobar.enqueue() + await sleep(wait) assert await task.abort(3) tg.cancel_scope.cancel() @@ -584,10 +583,26 @@ async def test_task_expired(worker: Worker): async def foobar() -> None: pass - task = await foobar.enqueue() - await asyncio.sleep(1) + async with worker: + task = await foobar.enqueue() + await sleep(1) + async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) res = await task.result(3) - assert not res.success and isinstance(res.result, StreaqError) + assert not res.success and isinstance(res.exception, StreaqError) + tg.cancel_scope.cancel() + + +async def test_cron_deterministic_id(worker: Worker): + @worker.cron("30 9 1 1 *") + async def cronjob() -> None: + pass + + async with create_task_group() as tg: + await tg.start(worker.run_async) + await sleep(1) + task = cronjob.enqueue() + assert await task.status() == 
TaskStatus.SCHEDULED + await task tg.cancel_scope.cancel() diff --git a/tests/test_web.py b/tests/test_web.py index 1b9ba81..1473efe 100644 --- a/tests/test_web.py +++ b/tests/test_web.py @@ -1,7 +1,5 @@ -import asyncio - import pytest -from anyio import create_task_group +from anyio import create_task_group, sleep from fastapi import FastAPI, HTTPException from httpx import ASGITransport, AsyncClient @@ -26,21 +24,20 @@ async def test_get_pages(worker: Worker): @worker.task() async def sleeper(time: int) -> None: - await asyncio.sleep(time) + await sleep(time) async def _get_worker(): yield worker - # queue up some tasks - scheduled = await sleeper.enqueue(10).start(delay=5) - done = await sleeper.enqueue(0) - running = await sleeper.enqueue(10) - queued = await sleeper.enqueue(10) - app.dependency_overrides[get_worker] = _get_worker async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(2) + await tg.start(worker.run_async) + # queue up some tasks + scheduled = await sleeper.enqueue(10).start(delay=5) + done = await sleeper.enqueue(0) + running = await sleeper.enqueue(10) + queued = await sleeper.enqueue(10) + await done.result(2) # make sure task is done async with AsyncClient( transport=ASGITransport(app=app), base_url="http://test" ) as client: diff --git a/tests/test_worker.py b/tests/test_worker.py index 9aae3c2..35bcf53 100644 --- a/tests/test_worker.py +++ b/tests/test_worker.py @@ -1,4 +1,3 @@ -import asyncio import json import os import pickle @@ -12,11 +11,10 @@ from uuid import uuid4 import pytest -from anyio import create_task_group, move_on_after +from anyio import create_task_group, sleep from streaq.constants import REDIS_TASK -from streaq.task import TaskStatus -from streaq.utils import StreaqError +from streaq.utils import StreaqError, gather from streaq.worker import Worker NAME_STR = "Freddy" @@ -24,7 +22,8 @@ async def test_worker_redis(worker: Worker): - await worker.redis.ping() + async with worker: + 
await worker.redis.ping() @dataclass @@ -44,10 +43,9 @@ async def test_lifespan(redis_url: str): async def foobar() -> str: return worker.context.name - task = await foobar.enqueue() async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) + await tg.start(worker.run_async) + task = await foobar.enqueue() res = await task.result(3) assert res.success and res.result == NAME_STR tg.cancel_scope.cancel() @@ -60,8 +58,8 @@ async def test_health_check(redis_url: str): queue_name=uuid4().hex, ) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(2) + await tg.start(worker.run_async) + await sleep(2) worker_health = await worker.redis.get(f"{worker._health_key}:{worker.id}") redis_health = await worker.redis.get(worker._health_key + ":redis") assert worker_health is not None @@ -70,7 +68,8 @@ async def test_health_check(redis_url: str): async def test_queue_size(worker: Worker): - assert await worker.queue_size() == 0 + async with worker: + assert await worker.queue_size() == 0 def raise_error(*arg, **kwargs) -> Any: @@ -84,8 +83,9 @@ async def test_bad_serializer(redis_url: str): async def foobar() -> None: print("This can't print!") - with pytest.raises(StreaqError): - await foobar.enqueue() + async with worker: + with pytest.raises(StreaqError): + await foobar.enqueue() async def test_bad_deserializer(redis_url: str): @@ -98,10 +98,11 @@ async def foobar() -> None: print("This can't print!") worker.burst = True - task = await foobar.enqueue() - await worker.run_async() - with pytest.raises(StreaqError): - await task.result(3) + async with create_task_group() as tg: + await tg.start(worker.run_async) + task = await foobar.enqueue() + with pytest.raises(StreaqError): + await task.result(3) async def test_custom_serializer(worker: Worker): @@ -112,9 +113,9 @@ async def test_custom_serializer(worker: Worker): async def foobar() -> None: pass - task = await foobar.enqueue() async with 
create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) + task = await foobar.enqueue() assert (await task.result(3)).success tg.cancel_scope.cancel() @@ -131,30 +132,31 @@ async def foobar() -> None: async def test_active_tasks(worker: Worker): @worker.task() async def foo() -> None: - await asyncio.sleep(3) + await sleep(10) n_tasks = 5 tasks = [foo.enqueue() for _ in range(n_tasks)] - await worker.enqueue_many(tasks) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) - assert worker.active == n_tasks + await tg.start(worker.run_async) + await worker.enqueue_many(tasks) + await sleep(3) + assert worker.active >= n_tasks tg.cancel_scope.cancel() async def test_handle_signal(worker: Worker): @worker.task() async def foo() -> None: - await asyncio.sleep(3) + await sleep(3) async with create_task_group() as tg: - tg.start_soon(worker.run_async) - await asyncio.sleep(1) + await tg.start(worker.run_async) + await foo.enqueue() + await sleep(1) + assert worker.active > 0 os.kill(os.getpid(), signal.SIGINT) - - task = await foo.enqueue() - assert await task.status() == TaskStatus.QUEUED + await sleep(1) + assert worker.active == 0 async def test_reclaim_backed_up(redis_url: str): @@ -165,22 +167,21 @@ async def test_reclaim_backed_up(redis_url: str): worker2 = Worker(redis_url=redis_url, queue_name=queue_name, idle_timeout=1) async def foo() -> None: - await asyncio.sleep(4) + await sleep(3) registered = worker.task()(foo) worker2.task()(foo) # enqueue tasks tasks = [registered.enqueue() for _ in range(4)] - await worker.enqueue_many(tasks) async with create_task_group() as tg: # run first worker which will pick up all tasks - tg.start_soon(worker.run_async) - await asyncio.sleep(1) + await tg.start(worker.run_async) + await worker.enqueue_many(tasks) # run second worker which will pick up prefetched tasks - tg.start_soon(worker2.run_async) + await tg.start(worker2.run_async) - results 
= await asyncio.gather(*[t.result(8) for t in tasks]) + results = await gather(*[t.result(5) for t in tasks]) assert any(r.worker_id == worker2.id for r in results) tg.cancel_scope.cancel() @@ -188,21 +189,21 @@ async def foo() -> None: async def test_reclaim_idle_task(redis_url: str): worker2 = Worker(redis_url=redis_url, queue_name="reclaim", idle_timeout=3) - @worker2.task(timeout=3) + @worker2.task(name="foo") async def foo() -> None: - await asyncio.sleep(2) + await sleep(2) # enqueue task - task = await foo.enqueue() + task = foo.enqueue() # run separate worker which will pick up task - worker = subprocess.Popen([sys.executable, "tests/failure.py", redis_url]) - await asyncio.sleep(1) + worker = subprocess.Popen([sys.executable, "tests/failure.py", redis_url, task.id]) + await sleep(1) # kill worker abruptly to disallow cleanup os.kill(worker.pid, signal.SIGKILL) worker.wait() async with create_task_group() as tg: - tg.start_soon(worker2.run_async) + await tg.start(worker2.run_async) assert (await task.result(8)).success tg.cancel_scope.cancel() @@ -213,22 +214,24 @@ async def foo() -> None: worker = Worker(redis_url=redis_url, queue_name=uuid4().hex) foo1 = worker.cron("0 0 1 1 *")(foo) - with move_on_after(2): - await worker.run_async() - task1 = foo1.enqueue() - info = await task1.info() - assert info and foo1.schedule() == info.scheduled + async with create_task_group() as tg: + await tg.start(worker.run_async) + await sleep(2) + task1 = foo1.enqueue() + info = await task1.info() + assert info and foo1.schedule() == info.scheduled + tg.cancel_scope.cancel() worker2 = Worker(redis_url=redis_url, queue_name=worker.queue_name) foo2 = worker2.cron("1 0 1 1 *")(foo) # 1 minute later async with create_task_group() as tg: - tg.start_soon(worker2.run_async) - await asyncio.sleep(2) + await tg.start(worker2.run_async) + await sleep(2) + task2 = foo2.enqueue() + info2 = await task2.info() + assert info2 and foo2.schedule() == info2.scheduled + assert 
foo1.schedule() != foo2.schedule() tg.cancel_scope.cancel() - task2 = foo2.enqueue() - info2 = await task2.info() - assert info2 and foo2.schedule() == info2.scheduled - assert foo1.schedule() != foo2.schedule() async def test_signed_data(redis_url: str): @@ -243,7 +246,7 @@ async def foo() -> str: return "bar" async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) task = await foo.enqueue() res = await task.result(3) assert res.success and res.result == "bar" @@ -262,8 +265,9 @@ async def test_sign_non_binary_data(redis_url: str): async def foo() -> str: return "bar" - with pytest.raises(StreaqError): - await foo.enqueue() + async with worker: + with pytest.raises(StreaqError): + await foo.enqueue() async def test_corrupt_signed_data(redis_url: str): @@ -278,27 +282,29 @@ async def test_corrupt_signed_data(redis_url: str): async def foo() -> str: return "bar" - task = await foo.enqueue() - await worker.redis.set( - task.task_key(REDIS_TASK), pickle.dumps({"f": "This is an attack!"}) - ) + async with worker: + task = await foo.enqueue() + await worker.redis.set( + task.task_key(REDIS_TASK), pickle.dumps({"f": "This is an attack!"}) + ) async with create_task_group() as tg: - tg.start_soon(worker.run_async) + await tg.start(worker.run_async) res = await task.result(5) - assert not res.success and isinstance(res.result, StreaqError) + assert not res.success and isinstance(res.exception, StreaqError) tg.cancel_scope.cancel() async def test_enqueue_many(worker: Worker): @worker.task() async def foobar(val: int) -> int: - await asyncio.sleep(1) + await sleep(1) return val - tasks = [foobar.enqueue(i) for i in range(10)] - await worker.enqueue_many(tasks) - assert await worker.queue_size() >= len(tasks) + async with worker: + tasks = [foobar.enqueue(i) for i in range(10)] + await worker.enqueue_many(tasks) + assert await worker.queue_size() >= len(tasks) async def test_invalid_task_context(worker: Worker): diff --git 
a/uv.lock b/uv.lock index bbc9b03..affe060 100644 --- a/uv.lock +++ b/uv.lock @@ -27,7 +27,7 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" +version = "4.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, @@ -35,9 +35,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] @@ -107,6 +107,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] +[[package]] +name = "attrs" +version = "25.3.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + [[package]] name = "autodocsumm" version = "0.2.14" @@ -178,6 +187,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = "https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = 
"sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.3" @@ -244,14 +281,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] @@ -265,124 +302,114 @@ wheels = [ [[package]] name = "coredis" -version = "5.0.1" -source = { registry = "https://pypi.org/simple" } +version = "5.0.1.post0.dev25" +source = { git = "https://github.com/Graeme22/coredis.git?rev=anyio#6402f4aef4e2fdb469bdb3d1edbd84e697c4a995" } dependencies = [ - { name = "async-timeout" }, + { name = "anyio" }, { name = "beartype" }, { name = "deprecated" }, { name = "packaging" }, { name = "pympler" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/3b/a228f0b6d6e7e1b54d105c25800c5ca0ba2eaf4c45bfe4d81528dff30942/coredis-5.0.1.tar.gz", hash = "sha256:c312dc11b785367b2da440cba3ebd7a47ca0bbf673d74d923a590aa77550c73f", size = 244540, upload-time = "2025-07-18T17:46:35.695Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/5d/1dd8c76e99e9a50076b0d1cad54fdea354badd802f0245e195e385e2038e/coredis-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2592b58b7c74646213a8493d9578cb9a2861f15bd5ba7400d9beaad897ed6e8c", size = 331481, upload-time = "2025-07-18T17:46:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/6b/47/c834eb1c1f17f0a748a5101449775d96f0fc5d12cd1e3ec4ca853c9e9f52/coredis-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0317b2212fd36057eb4866e79ffc722d76c87d4481dd1f73eae72d5850dd333d", size = 326580, upload-time = "2025-07-18T17:46:08.997Z" }, - { url = "https://files.pythonhosted.org/packages/10/dc/b9f6a1ccd1b43637139ee561adf6738a03a17aaad15607c4f54da050adc6/coredis-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d569ae964da91f805e60494179c3871eb5e0ed34c3a8f279a086a34f7b40d70f", size = 354885, upload-time = 
"2025-07-18T17:46:10.231Z" }, - { url = "https://files.pythonhosted.org/packages/04/d4/394cbef26707fccbb9272bb5706f9aae77939505c23b3b3c43fbf0d42a8e/coredis-5.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7562e6f4b261160eb130eb4a7d17f344870496edb06371c5304c86289bac4f37", size = 358407, upload-time = "2025-07-18T17:46:11.469Z" }, - { url = "https://files.pythonhosted.org/packages/c6/74/f4a5fe3d909c876c5a4d14e8d1f5c651f39cbdac28fe784138c989abd558/coredis-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a26f6671b29bd5c8f6902be736770e6b15564d765707b5014d05ba837d477ce", size = 361020, upload-time = "2025-07-18T17:46:12.75Z" }, - { url = "https://files.pythonhosted.org/packages/57/71/fb56e7808452d7a6b89ee9d3e7f04ae687f3b6d25107eef411b4bfbf0ddc/coredis-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3fdaad2567aea8281fbbd99ec9e153722f08e5e87eb2e75848873cea44245fe8", size = 330451, upload-time = "2025-07-18T17:46:14.066Z" }, - { url = "https://files.pythonhosted.org/packages/67/91/04e2ed48fb8d10ac2f3415741d503bd0c84c13ac6f607f8b49afd30e7157/coredis-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46b9973fe126f35083d0200a7749e48a25ac0c6faa2b42284cd733267520357b", size = 325585, upload-time = "2025-07-18T17:46:15.257Z" }, - { url = "https://files.pythonhosted.org/packages/4d/51/e65045f5f76eff562987efb2a060a021c0ca342180d14b9fa80db6690645/coredis-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c793028b5386166316cb268cd832e5b451a4efab908179449f16bc026a1f763b", size = 353723, upload-time = "2025-07-18T17:46:16.444Z" }, - { url = "https://files.pythonhosted.org/packages/e7/55/75971f0d77a61fdb1fe8764bc81f7927528ac8c4ed06b34d8926bd62402f/coredis-5.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1705038b9fac64443e42f6707892f8560c98673e209e5a9d0c93cd0ae3640a50", size = 357279, upload-time = 
"2025-07-18T17:46:18.109Z" }, - { url = "https://files.pythonhosted.org/packages/be/7c/8d3261c40b6cddb4bd4892adb6087ab81d55bd7486cb421a7f769c55090d/coredis-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1386440fa0bd5acf01e11e4edc26c217f65096ae3a00c72910bdc6a3ae39d432", size = 359564, upload-time = "2025-07-18T17:46:19.399Z" }, - { url = "https://files.pythonhosted.org/packages/43/5b/65177c09ba61b02c888da11ba7205ba04a1dbd353e6ea368599634eda001/coredis-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1e34fe52965ce80ae33a662a47e140623666998a778a059403a5b3493a3aad5e", size = 330552, upload-time = "2025-07-18T17:46:20.627Z" }, - { url = "https://files.pythonhosted.org/packages/92/45/7dff636e166ecf1b755bd3aa3ac78e972085a7184bc58f87c42e1e895a98/coredis-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d0e79e288306a080dfb5a4553734de652c49c3653ca7c6de748846e30f6d1bd0", size = 324567, upload-time = "2025-07-18T17:46:21.901Z" }, - { url = "https://files.pythonhosted.org/packages/a0/ae/de6915f2604e03da4c3b8fd57971ec2d7bea0bea664921ad6fd84d5315b7/coredis-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdc0137077cca1f29f659711bd98e36bb8da3c75451e6374c0e462e2ec54238c", size = 354767, upload-time = "2025-07-18T17:46:23.082Z" }, - { url = "https://files.pythonhosted.org/packages/3a/bb/fa99ed9a3adcf89931638bf0fa3474c8b5126cd6ae09e6cbd3f3815001d2/coredis-5.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27123964f0609f2d361f668f406c0a284c7362f3d4e56b25646f87445036af23", size = 359068, upload-time = "2025-07-18T17:46:24.704Z" }, - { url = "https://files.pythonhosted.org/packages/54/5c/1f140b1a771225c7c93c182240a1a47d5393e2439bbdb104edfbc1e9619d/coredis-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:856a955fc5c6ecc40889b67b605b23082d77a6da2b404b73f0cca8721868e182", size = 
361801, upload-time = "2025-07-18T17:46:25.976Z" }, - { url = "https://files.pythonhosted.org/packages/ab/3d/0052462d245b371c277bf53d41fb5a2e3e6823a889badb0f4ad59aa87ced/coredis-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:70a1311ba33d3b4f204d6670be16f269d5b1805237af6e211ca92872061e3e3d", size = 330359, upload-time = "2025-07-18T17:46:27.194Z" }, - { url = "https://files.pythonhosted.org/packages/90/d4/8baba401f09369697e077d993455f229c2e0ab2717956b557d3316d2898d/coredis-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4948208e0f51a8b67c5a9d2bce808b4e86fe51adf09d11ba02ba2aa17e50ac62", size = 324395, upload-time = "2025-07-18T17:46:28.478Z" }, - { url = "https://files.pythonhosted.org/packages/73/ce/afa13b168281b57e1277a074ef2190c004b65470f881a058f4faec6dd708/coredis-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33a11a95308049df4f31c03df38012b7c532595095487cd49726df39f58bbf83", size = 354376, upload-time = "2025-07-18T17:46:30.147Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4f/51902e6645f0bbddcdf528496e2c909e866ccc3e8fa4837ad7cf0f3e48cc/coredis-5.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed99b915b31a17a3c67d7ea5a9558d70f4378408c2e16078b836538ae1198641", size = 358728, upload-time = "2025-07-18T17:46:31.304Z" }, - { url = "https://files.pythonhosted.org/packages/8e/56/f3aca55db79a9dc3d8b1d06e2eba02300131e8072808353c58a69e9cedbd/coredis-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e45fa763cb20da9c37b71334c962126fdad16bb3db8aea8ea7dc8f69bdc5bca", size = 361107, upload-time = "2025-07-18T17:46:32.543Z" }, - { url = "https://files.pythonhosted.org/packages/a3/62/82fc2b716ef0ab1b092efb3f76abe2def725d2e7ec2e930aded538784f51/coredis-5.0.1-py3-none-any.whl", hash = "sha256:1bf6098a17832d232ca429640662b25be378139dd54574853ac21dc48cefd4ba", size = 238704, upload-time = "2025-07-18T17:46:34.166Z" }, 
-] [[package]] name = "coverage" -version = "7.10.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662, upload-time = "2025-08-23T14:42:44.78Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/70/e77b0061a6c7157bfce645c6b9a715a08d4c86b3360a7b3252818080b817/coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801", size = 216774, upload-time = "2025-08-23T14:40:26.301Z" }, - { url = "https://files.pythonhosted.org/packages/91/08/2a79de5ecf37ee40f2d898012306f11c161548753391cec763f92647837b/coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a", size = 217175, upload-time = "2025-08-23T14:40:29.142Z" }, - { url = "https://files.pythonhosted.org/packages/64/57/0171d69a699690149a6ba6a4eb702814448c8d617cf62dbafa7ce6bfdf63/coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754", size = 243931, upload-time = "2025-08-23T14:40:30.735Z" }, - { url = "https://files.pythonhosted.org/packages/15/06/3a67662c55656702bd398a727a7f35df598eb11104fcb34f1ecbb070291a/coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33", size = 245740, upload-time = "2025-08-23T14:40:32.302Z" }, - { url = "https://files.pythonhosted.org/packages/00/f4/f8763aabf4dc30ef0d0012522d312f0b7f9fede6246a1f27dbcc4a1e523c/coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f", size = 247600, upload-time = "2025-08-23T14:40:33.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/31/6632219a9065e1b83f77eda116fed4c76fb64908a6a9feae41816dab8237/coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9", size = 245640, upload-time = "2025-08-23T14:40:35.248Z" }, - { url = "https://files.pythonhosted.org/packages/6e/e2/3dba9b86037b81649b11d192bb1df11dde9a81013e434af3520222707bc8/coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3", size = 243659, upload-time = "2025-08-23T14:40:36.815Z" }, - { url = "https://files.pythonhosted.org/packages/02/b9/57170bd9f3e333837fc24ecc88bc70fbc2eb7ccfd0876854b0c0407078c3/coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879", size = 244537, upload-time = "2025-08-23T14:40:38.737Z" }, - { url = "https://files.pythonhosted.org/packages/b3/1c/93ac36ef1e8b06b8d5777393a3a40cb356f9f3dab980be40a6941e443588/coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8", size = 219285, upload-time = "2025-08-23T14:40:40.342Z" }, - { url = "https://files.pythonhosted.org/packages/30/95/23252277e6e5fe649d6cd3ed3f35d2307e5166de4e75e66aa7f432abc46d/coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff", size = 220185, upload-time = "2025-08-23T14:40:42.026Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f2/336d34d2fc1291ca7c18eeb46f64985e6cef5a1a7ef6d9c23720c6527289/coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2", size = 216890, upload-time = "2025-08-23T14:40:43.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/ea/92448b07cc1cf2b429d0ce635f59cf0c626a5d8de21358f11e92174ff2a6/coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f", size = 217287, upload-time = "2025-08-23T14:40:45.214Z" }, - { url = "https://files.pythonhosted.org/packages/96/ba/ad5b36537c5179c808d0ecdf6e4aa7630b311b3c12747ad624dcd43a9b6b/coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab", size = 247683, upload-time = "2025-08-23T14:40:46.791Z" }, - { url = "https://files.pythonhosted.org/packages/28/e5/fe3bbc8d097029d284b5fb305b38bb3404895da48495f05bff025df62770/coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c", size = 249614, upload-time = "2025-08-23T14:40:48.082Z" }, - { url = "https://files.pythonhosted.org/packages/69/9c/a1c89a8c8712799efccb32cd0a1ee88e452f0c13a006b65bb2271f1ac767/coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1", size = 251719, upload-time = "2025-08-23T14:40:49.349Z" }, - { url = "https://files.pythonhosted.org/packages/e9/be/5576b5625865aa95b5633315f8f4142b003a70c3d96e76f04487c3b5cc95/coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78", size = 249411, upload-time = "2025-08-23T14:40:50.624Z" }, - { url = "https://files.pythonhosted.org/packages/94/0a/e39a113d4209da0dbbc9385608cdb1b0726a4d25f78672dc51c97cfea80f/coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df", size = 247466, upload-time = "2025-08-23T14:40:52.362Z" }, - { url 
= "https://files.pythonhosted.org/packages/40/cb/aebb2d8c9e3533ee340bea19b71c5b76605a0268aa49808e26fe96ec0a07/coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6", size = 248104, upload-time = "2025-08-23T14:40:54.064Z" }, - { url = "https://files.pythonhosted.org/packages/08/e6/26570d6ccce8ff5de912cbfd268e7f475f00597cb58da9991fa919c5e539/coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf", size = 219327, upload-time = "2025-08-23T14:40:55.424Z" }, - { url = "https://files.pythonhosted.org/packages/79/79/5f48525e366e518b36e66167e3b6e5db6fd54f63982500c6a5abb9d3dfbd/coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50", size = 220213, upload-time = "2025-08-23T14:40:56.724Z" }, - { url = "https://files.pythonhosted.org/packages/40/3c/9058128b7b0bf333130c320b1eb1ae485623014a21ee196d68f7737f8610/coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82", size = 218893, upload-time = "2025-08-23T14:40:58.011Z" }, - { url = "https://files.pythonhosted.org/packages/27/8e/40d75c7128f871ea0fd829d3e7e4a14460cad7c3826e3b472e6471ad05bd/coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9", size = 217077, upload-time = "2025-08-23T14:40:59.329Z" }, - { url = "https://files.pythonhosted.org/packages/18/a8/f333f4cf3fb5477a7f727b4d603a2eb5c3c5611c7fe01329c2e13b23b678/coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b", size = 217310, upload-time = "2025-08-23T14:41:00.628Z" }, - { url = 
"https://files.pythonhosted.org/packages/ec/2c/fbecd8381e0a07d1547922be819b4543a901402f63930313a519b937c668/coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c", size = 248802, upload-time = "2025-08-23T14:41:02.012Z" }, - { url = "https://files.pythonhosted.org/packages/3f/bc/1011da599b414fb6c9c0f34086736126f9ff71f841755786a6b87601b088/coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a", size = 251550, upload-time = "2025-08-23T14:41:03.438Z" }, - { url = "https://files.pythonhosted.org/packages/4c/6f/b5c03c0c721c067d21bc697accc3642f3cef9f087dac429c918c37a37437/coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6", size = 252684, upload-time = "2025-08-23T14:41:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/f9/50/d474bc300ebcb6a38a1047d5c465a227605d6473e49b4e0d793102312bc5/coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a", size = 250602, upload-time = "2025-08-23T14:41:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/4a/2d/548c8e04249cbba3aba6bd799efdd11eee3941b70253733f5d355d689559/coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a", size = 248724, upload-time = "2025-08-23T14:41:08.429Z" }, - { url = "https://files.pythonhosted.org/packages/e2/96/a7c3c0562266ac39dcad271d0eec8fc20ab576e3e2f64130a845ad2a557b/coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34", size = 250158, upload-time = "2025-08-23T14:41:09.749Z" }, - { 
url = "https://files.pythonhosted.org/packages/f3/75/74d4be58c70c42ef0b352d597b022baf12dbe2b43e7cb1525f56a0fb1d4b/coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf", size = 219493, upload-time = "2025-08-23T14:41:11.095Z" }, - { url = "https://files.pythonhosted.org/packages/4f/08/364e6012d1d4d09d1e27437382967efed971d7613f94bca9add25f0c1f2b/coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f", size = 220302, upload-time = "2025-08-23T14:41:12.449Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/7c8a365e1f7355c58af4fe5faf3f90cc8e587590f5854808d17ccb4e7077/coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8", size = 218936, upload-time = "2025-08-23T14:41:13.872Z" }, - { url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106, upload-time = "2025-08-23T14:41:15.268Z" }, - { url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353, upload-time = "2025-08-23T14:41:16.656Z" }, - { url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350, upload-time = "2025-08-23T14:41:18.128Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955, upload-time = "2025-08-23T14:41:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230, upload-time = "2025-08-23T14:41:20.959Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387, upload-time = "2025-08-23T14:41:22.644Z" }, - { url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280, upload-time = "2025-08-23T14:41:24.061Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894, upload-time = "2025-08-23T14:41:26.165Z" }, - { url = "https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536, upload-time = "2025-08-23T14:41:27.694Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330, upload-time = "2025-08-23T14:41:29.081Z" }, - { url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961, upload-time = "2025-08-23T14:41:30.511Z" }, - { url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819, upload-time = "2025-08-23T14:41:31.962Z" }, - { url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040, upload-time = "2025-08-23T14:41:33.472Z" }, - { url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374, upload-time = "2025-08-23T14:41:34.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551, upload-time = "2025-08-23T14:41:36.333Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776, upload-time = "2025-08-23T14:41:38.25Z" }, - { url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326, upload-time = "2025-08-23T14:41:40.343Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090, upload-time = "2025-08-23T14:41:42.106Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217, upload-time = "2025-08-23T14:41:43.591Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194, upload-time = "2025-08-23T14:41:45.051Z" }, - { url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258, upload-time = "2025-08-23T14:41:46.44Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521, upload-time = "2025-08-23T14:41:47.882Z" }, - { url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090, upload-time = "2025-08-23T14:41:49.327Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365, upload-time = "2025-08-23T14:41:50.796Z" }, - { url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413, upload-time = "2025-08-23T14:41:52.5Z" }, - { url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943, upload-time = "2025-08-23T14:41:53.922Z" }, - { url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301, upload-time = "2025-08-23T14:41:56.528Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302, upload-time = "2025-08-23T14:41:58.171Z" }, - { url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237, upload-time = "2025-08-23T14:41:59.703Z" }, - { url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726, upload-time = "2025-08-23T14:42:01.343Z" }, - { url = "https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825, upload-time = "2025-08-23T14:42:03.263Z" }, - { url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618, upload-time = "2025-08-23T14:42:05.037Z" }, - { url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199, upload-time = "2025-08-23T14:42:06.662Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833, upload-time = "2025-08-23T14:42:08.262Z" }, - { url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048, upload-time = "2025-08-23T14:42:10.247Z" }, - { url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549, upload-time = "2025-08-23T14:42:11.811Z" }, - { url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715, upload-time = "2025-08-23T14:42:13.505Z" }, - { url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969, upload-time = "2025-08-23T14:42:15.422Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408, upload-time = "2025-08-23T14:42:16.971Z" }, - 
{ url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168, upload-time = "2025-08-23T14:42:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317, upload-time = "2025-08-23T14:42:20.005Z" }, - { url = "https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600, upload-time = "2025-08-23T14:42:22.027Z" }, - { url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714, upload-time = "2025-08-23T14:42:23.616Z" }, - { url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735, upload-time = "2025-08-23T14:42:25.156Z" }, - { url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736, upload-time = "2025-08-23T14:42:43.145Z" }, +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, + { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, + { url = "https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = "2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = "2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = 
"2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = "https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = 
"2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = "https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = "https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = 
"2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, ] [package.optional-dependencies] @@ -446,20 +473,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/47/290daabcf91628f4fc0e17c75a1690b354ba067066cd14407712600e609f/dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d", size = 25647, upload-time = "2023-11-22T11:09:19.221Z" }, ] -[[package]] -name = "docker" -version = "7.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "sys_platform == 'win32'" }, - { name = "requests" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, -] - [[package]] name = "docutils" version = "0.21.2" @@ -508,7 +521,7 @@ name = "exceptiongroup" version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "typing-extensions", marker = 
"python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } wheels = [ @@ -526,16 +539,16 @@ wheels = [ [[package]] name = "fastapi" -version = "0.116.1" +version = "0.117.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/7e/d9788300deaf416178f61fb3c2ceb16b7d0dc9f82a08fdb87a5e64ee3cc7/fastapi-0.117.1.tar.gz", hash = "sha256:fb2d42082d22b185f904ca0ecad2e195b851030bd6c5e4c032d1c981240c631a", size = 307155, upload-time = "2025-09-20T20:16:56.663Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/6d/45/d9d3e8eeefbe93be1c50060a9d9a9f366dba66f288bb518a9566a23a8631/fastapi-0.117.1-py3-none-any.whl", hash = "sha256:33c51a0d21cab2b9722d4e56dbb9316f3687155be6b276191790d8da03507552", size = 95959, upload-time = "2025-09-20T20:16:53.661Z" }, ] [[package]] @@ -812,11 +825,11 @@ wheels = [ [[package]] name = "more-itertools" -version = "10.7.0" +version = "10.8.0" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, + { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" }, ] [[package]] @@ -885,6 +898,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] +[[package]] +name = "outcome" +version = "1.3.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = 
"2023-10-26T04:26:04.361Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -912,6 +937,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + [[package]] name = "pycron" version = "3.2.0" @@ -923,7 +957,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -931,9 +965,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", 
size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, ] [[package]] @@ -1059,20 +1093,20 @@ wheels = [ [[package]] name = "pyright" -version = "1.1.404" +version = "1.1.405" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e2/6e/026be64c43af681d5632722acd100b06d3d39f383ec382ff50a71a6d5bce/pyright-1.1.404.tar.gz", hash = "sha256:455e881a558ca6be9ecca0b30ce08aa78343ecc031d37a198ffa9a7a1abeb63e", size = 4065679, upload-time = "2025-08-20T18:46:14.029Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/30/89aa7f7d7a875bbb9a577d4b1dc5a3e404e3d2ae2657354808e905e358e0/pyright-1.1.404-py3-none-any.whl", hash = 
"sha256:c7b7ff1fdb7219c643079e4c3e7d4125f0dafcc19d253b47e898d130ea426419", size = 5902951, upload-time = "2025-08-20T18:46:12.096Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, ] [[package]] name = "pytest" -version = "8.4.1" +version = "8.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1083,23 +1117,23 @@ dependencies = [ { name = "pygments" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, ] [[package]] name = "pytest-cov" -version = "6.2.1" +version = "7.0.0" source 
= { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] [[package]] @@ -1127,15 +1161,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] -[[package]] -name = "python-dotenv" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = 
"sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, -] - [[package]] name = "python-multipart" version = "0.0.20" @@ -1244,72 +1269,80 @@ wheels = [ [[package]] name = "ruamel-yaml-clib" -version = "0.2.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f", size = 225315, upload-time = "2024-10-20T10:10:56.22Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/70/57/40a958e863e299f0c74ef32a3bde9f2d1ea8d69669368c0c502a0997f57f/ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5", size = 131301, upload-time = "2024-10-20T10:12:35.876Z" }, - { url = "https://files.pythonhosted.org/packages/98/a8/29a3eb437b12b95f50a6bcc3d7d7214301c6c529d8fdc227247fa84162b5/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969", size = 633728, upload-time = "2024-10-20T10:12:37.858Z" }, - { url = "https://files.pythonhosted.org/packages/35/6d/ae05a87a3ad540259c3ad88d71275cbd1c0f2d30ae04c65dcbfb6dcd4b9f/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df", size = 722230, upload-time = "2024-10-20T10:12:39.457Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/b7/20c6f3c0b656fe609675d69bc135c03aac9e3865912444be6339207b6648/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76", size = 686712, upload-time = "2024-10-20T10:12:41.119Z" }, - { url = "https://files.pythonhosted.org/packages/cd/11/d12dbf683471f888d354dac59593873c2b45feb193c5e3e0f2ebf85e68b9/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6", size = 663936, upload-time = "2024-10-21T11:26:37.419Z" }, - { url = "https://files.pythonhosted.org/packages/72/14/4c268f5077db5c83f743ee1daeb236269fa8577133a5cfa49f8b382baf13/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd", size = 696580, upload-time = "2024-10-21T11:26:39.503Z" }, - { url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a", size = 663393, upload-time = "2024-12-11T19:58:13.873Z" }, - { url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da", size = 100326, upload-time = "2024-10-20T10:12:42.967Z" }, - { url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28", size = 118079, upload-time = "2024-10-20T10:12:44.117Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6", size = 132224, upload-time = "2024-10-20T10:12:45.162Z" }, - { url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e", size = 641480, upload-time = "2024-10-20T10:12:46.758Z" }, - { url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e", size = 739068, upload-time = "2024-10-20T10:12:48.605Z" }, - { url = "https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52", size = 703012, upload-time = "2024-10-20T10:12:51.124Z" }, - { url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642", size = 704352, upload-time = "2024-10-21T11:26:41.438Z" }, - { url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2", size = 737344, upload-time = "2024-10-21T11:26:43.62Z" }, - { url 
= "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3", size = 714498, upload-time = "2024-12-11T19:58:15.592Z" }, - { url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4", size = 100205, upload-time = "2024-10-20T10:12:52.865Z" }, - { url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb", size = 118185, upload-time = "2024-10-20T10:12:54.652Z" }, - { url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632", size = 133433, upload-time = "2024-10-20T10:12:55.657Z" }, - { url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d", size = 647362, upload-time = "2024-10-20T10:12:57.155Z" }, - { url = "https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c", size = 754118, upload-time = "2024-10-20T10:12:58.501Z" }, - { url = 
"https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497, upload-time = "2024-10-20T10:13:00.211Z" }, - { url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042, upload-time = "2024-10-21T11:26:46.038Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831, upload-time = "2024-10-21T11:26:47.487Z" }, - { url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692, upload-time = "2024-12-11T19:58:17.252Z" }, - { url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777, upload-time = "2024-10-20T10:13:01.395Z" }, - { url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523, upload-time = "2024-10-20T10:13:02.768Z" }, - { url = 
"https://files.pythonhosted.org/packages/29/00/4864119668d71a5fa45678f380b5923ff410701565821925c69780356ffa/ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a", size = 132011, upload-time = "2024-10-20T10:13:04.377Z" }, - { url = "https://files.pythonhosted.org/packages/7f/5e/212f473a93ae78c669ffa0cb051e3fee1139cb2d385d2ae1653d64281507/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475", size = 642488, upload-time = "2024-10-20T10:13:05.906Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8f/ecfbe2123ade605c49ef769788f79c38ddb1c8fa81e01f4dbf5cf1a44b16/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef", size = 745066, upload-time = "2024-10-20T10:13:07.26Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a9/28f60726d29dfc01b8decdb385de4ced2ced9faeb37a847bd5cf26836815/ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6", size = 701785, upload-time = "2024-10-20T10:13:08.504Z" }, - { url = "https://files.pythonhosted.org/packages/84/7e/8e7ec45920daa7f76046578e4f677a3215fe8f18ee30a9cb7627a19d9b4c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf", size = 693017, upload-time = "2024-10-21T11:26:48.866Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b3/d650eaade4ca225f02a648321e1ab835b9d361c60d51150bac49063b83fa/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1", size = 741270, upload-time = "2024-10-21T11:26:50.213Z" }, - { url 
= "https://files.pythonhosted.org/packages/87/b8/01c29b924dcbbed75cc45b30c30d565d763b9c4d540545a0eeecffb8f09c/ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01", size = 709059, upload-time = "2024-12-11T19:58:18.846Z" }, - { url = "https://files.pythonhosted.org/packages/30/8c/ed73f047a73638257aa9377ad356bea4d96125b305c34a28766f4445cc0f/ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6", size = 98583, upload-time = "2024-10-20T10:13:09.658Z" }, - { url = "https://files.pythonhosted.org/packages/b0/85/e8e751d8791564dd333d5d9a4eab0a7a115f7e349595417fd50ecae3395c/ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3", size = 115190, upload-time = "2024-10-20T10:13:10.66Z" }, +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/e9/39ec4d4b3f91188fad1842748f67d4e749c77c37e353c4e545052ee8e893/ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e", size = 225394, upload-time = "2025-09-22T19:51:23.753Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/56/35a0a752415ae01992c68f5a6513bdef0e1b6fbdb60d7619342ce12346a0/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f8b2acb0ffdd2ce8208accbec2dca4a06937d556fdcaefd6473ba1b5daa7e3c4", size = 269216, upload-time = "2025-09-23T14:24:09.742Z" }, + { url = "https://files.pythonhosted.org/packages/98/6a/9a68184ab93619f4607ff1675e4ef01e8accfcbff0d482f4ca44c10d8eab/ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:aef953f3b8bd0b50bd52a2e52fb54a6a2171a1889d8dea4a5959d46c6624c451", size = 137092, upload-time = "2025-09-22T19:50:26.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/3f/cfed5f088628128a9ec66f46794fd4d165642155c7b78c26d83b16c6bf7b/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a0ac90efbc7a77b0d796c03c8cc4e62fd710b3f1e4c32947713ef2ef52e09543", size = 633768, upload-time = "2025-09-22T19:50:31.228Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d5/5ce2cc156c1da48160171968d91f066d305840fbf930ee955a509d025a44/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bf6b699223afe6c7fe9f2ef76e0bfa6dd892c21e94ce8c957478987ade76cd8", size = 721253, upload-time = "2025-09-22T19:50:28.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/71/d0b56bc902b38ebe4be8e270f730f929eec4edaf8a0fa7028f4ef64fa950/ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73a0187718f6eec5b2f729b0f98e4603f7bd9c48aa65d01227d1a5dcdfbe9e8", size = 683823, upload-time = "2025-09-22T19:50:29.993Z" }, + { url = "https://files.pythonhosted.org/packages/4b/db/1f37449dd89c540218598316ccafc1a0aed60215e72efa315c5367cfd015/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81f6d3b19bc703679a5705c6a16dabdc79823c71d791d73c65949be7f3012c02", size = 690370, upload-time = "2025-09-23T18:42:46.797Z" }, + { url = "https://files.pythonhosted.org/packages/5d/53/c498b30f35efcd9f47cb084d7ad9374f2b907470f73913dec6396b81397d/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b28caeaf3e670c08cb7e8de221266df8494c169bd6ed8875493fab45be9607a4", size = 703578, upload-time = "2025-09-22T19:50:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/34/79/492cfad9baed68914840c39e5f3c1cc251f51a897ddb3f532601215cbb12/ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94f3efb718f8f49b031f2071ec7a27dd20cbfe511b4dfd54ecee54c956da2b31", size = 722544, upload-time = "2025-09-22T19:50:34.157Z" }, + 
{ url = "https://files.pythonhosted.org/packages/ca/f5/479ebfd5ba396e209ade90f7282d84b90c57b3e07be8dc6fcd02a6df7ffc/ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl", hash = "sha256:27c070cf3888e90d992be75dd47292ff9aa17dafd36492812a6a304a1aedc182", size = 100375, upload-time = "2025-09-22T19:50:36.832Z" }, + { url = "https://files.pythonhosted.org/packages/57/31/a044520fdb3bd409889f67f1efebda0658033c7ab3f390cee37531cc9a9e/ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl", hash = "sha256:4f4a150a737fccae13fb51234d41304ff2222e3b7d4c8e9428ed1a6ab48389b8", size = 118129, upload-time = "2025-09-22T19:50:35.545Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/3c51e9578b8c36fcc4bdd271a1a5bb65963a74a4b6ad1a989768a22f6c2a/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e", size = 270207, upload-time = "2025-09-23T14:24:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/4a/16/cb02815bc2ae9c66760c0c061d23c7358f9ba51dae95ac85247662b7fbe2/ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d", size = 137780, upload-time = "2025-09-22T19:50:37.734Z" }, + { url = "https://files.pythonhosted.org/packages/31/c6/fc687cd1b93bff8e40861eea46d6dc1a6a778d9a085684e4045ff26a8e40/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9", size = 641590, upload-time = "2025-09-22T19:50:41.978Z" }, + { url = "https://files.pythonhosted.org/packages/45/5d/65a2bc08b709b08576b3f307bf63951ee68a8e047cbbda6f1c9864ecf9a7/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70", size = 738090, upload-time = "2025-09-22T19:50:39.152Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/d0/a70a03614d9a6788a3661ab1538879ed2aae4e84d861f101243116308a37/ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98", size = 700744, upload-time = "2025-09-22T19:50:40.811Z" }, + { url = "https://files.pythonhosted.org/packages/77/30/c93fa457611f79946d5cb6cc97493ca5425f3f21891d7b1f9b44eaa1b38e/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee", size = 742321, upload-time = "2025-09-23T18:42:48.916Z" }, + { url = "https://files.pythonhosted.org/packages/40/85/e2c54ad637117cd13244a4649946eaa00f32edcb882d1f92df90e079ab00/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d", size = 743805, upload-time = "2025-09-22T19:50:43.58Z" }, + { url = "https://files.pythonhosted.org/packages/81/50/f899072c38877d8ef5382e0b3d47f8c4346226c1f52d6945d6f64fec6a2f/ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c", size = 769529, upload-time = "2025-09-22T19:50:45.707Z" }, + { url = "https://files.pythonhosted.org/packages/99/7c/96d4b5075e30c65ea2064e40c2d657c7c235d7b6ef18751cf89a935b9041/ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl", hash = "sha256:915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a", size = 100256, upload-time = "2025-09-22T19:50:48.26Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8c/73ee2babd04e8bfcf1fd5c20aa553d18bf0ebc24b592b4f831d12ae46cc0/ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1", size = 118234, upload-time = "2025-09-22T19:50:47.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/42/ccfb34a25289afbbc42017e4d3d4288e61d35b2e00cfc6b92974a6a1f94b/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27", size = 271775, upload-time = "2025-09-23T14:24:12.771Z" }, + { url = "https://files.pythonhosted.org/packages/82/73/e628a92e80197ff6a79ab81ec3fa00d4cc082d58ab78d3337b7ba7043301/ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052", size = 138842, upload-time = "2025-09-22T19:50:49.156Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c5/346c7094344a60419764b4b1334d9e0285031c961176ff88ffb652405b0c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a", size = 647404, upload-time = "2025-09-22T19:50:52.921Z" }, + { url = "https://files.pythonhosted.org/packages/df/99/65080c863eb06d4498de3d6c86f3e90595e02e159fd8529f1565f56cfe2c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29", size = 753141, upload-time = "2025-09-22T19:50:50.294Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e3/0de85f3e3333f8e29e4b10244374a202a87665d1131798946ee22cf05c7c/ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4", size = 703477, upload-time = "2025-09-22T19:50:51.508Z" }, + { url = "https://files.pythonhosted.org/packages/d9/25/0d2f09d8833c7fd77ab8efeff213093c16856479a9d293180a0d89f6bed9/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9", size = 741157, upload-time = "2025-09-23T18:42:50.408Z" }, 
+ { url = "https://files.pythonhosted.org/packages/d3/8c/959f10c2e2153cbdab834c46e6954b6dd9e3b109c8f8c0a3cf1618310985/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259", size = 745859, upload-time = "2025-09-22T19:50:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6b/e580a7c18b485e1a5f30a32cda96b20364b0ba649d9d2baaf72f8bd21f83/ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023", size = 770200, upload-time = "2025-09-22T19:50:55.718Z" }, + { url = "https://files.pythonhosted.org/packages/ef/44/3455eebc761dc8e8fdced90f2b0a3fa61e32ba38b50de4130e2d57db0f21/ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl", hash = "sha256:b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54", size = 98829, upload-time = "2025-09-22T19:50:58.895Z" }, + { url = "https://files.pythonhosted.org/packages/76/ab/5121f7f3b651db93de546f8c982c241397aad0a4765d793aca1dac5eadee/ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68", size = 115570, upload-time = "2025-09-22T19:50:57.981Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ae/e3811f05415594025e96000349d3400978adaed88d8f98d494352d9761ee/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32", size = 269205, upload-time = "2025-09-23T14:24:15.06Z" }, + { url = "https://files.pythonhosted.org/packages/72/06/7d51f4688d6d72bb72fa74254e1593c4f5ebd0036be5b41fe39315b275e9/ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85", size = 137417, upload-time = "2025-09-22T19:50:59.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/08/b4499234a420ef42960eeb05585df5cc7eb25ccb8c980490b079e6367050/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e", size = 642558, upload-time = "2025-09-22T19:51:03.388Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ba/1975a27dedf1c4c33306ee67c948121be8710b19387aada29e2f139c43ee/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb", size = 744087, upload-time = "2025-09-22T19:51:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/20/15/8a19a13d27f3bd09fa18813add8380a29115a47b553845f08802959acbce/ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d", size = 699709, upload-time = "2025-09-22T19:51:02.075Z" }, + { url = "https://files.pythonhosted.org/packages/19/ee/8d6146a079ad21e534b5083c9ee4a4c8bec42f79cf87594b60978286b39a/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59", size = 708926, upload-time = "2025-09-23T18:42:51.707Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/426b714abdc222392e68f3b8ad323930d05a214a27c7e7a0f06c69126401/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca", size = 740202, upload-time = "2025-09-22T19:51:04.673Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ac/3c5c2b27a183f4fda8a57c82211721c016bcb689a4a175865f7646db9f94/ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6", size = 765196, upload-time = "2025-09-22T19:51:05.916Z" }, + 
{ url = "https://files.pythonhosted.org/packages/92/2e/06f56a71fd55021c993ed6e848c9b2e5e9cfce180a42179f0ddd28253f7c/ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl", hash = "sha256:f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2", size = 98635, upload-time = "2025-09-22T19:51:08.183Z" }, + { url = "https://files.pythonhosted.org/packages/51/79/76aba16a1689b50528224b182f71097ece338e7a4ab55e84c2e73443b78a/ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl", hash = "sha256:090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78", size = 115238, upload-time = "2025-09-22T19:51:07.081Z" }, + { url = "https://files.pythonhosted.org/packages/21/e2/a59ff65c26aaf21a24eb38df777cb9af5d87ba8fc8107c163c2da9d1e85e/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f", size = 271441, upload-time = "2025-09-23T14:24:16.498Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fa/3234f913fe9a6525a7b97c6dad1f51e72b917e6872e051a5e2ffd8b16fbb/ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83", size = 137970, upload-time = "2025-09-22T19:51:09.472Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ec/4edbf17ac2c87fa0845dd366ef8d5852b96eb58fcd65fc1ecf5fe27b4641/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27", size = 739639, upload-time = "2025-09-22T19:51:10.566Z" }, + { url = "https://files.pythonhosted.org/packages/15/18/b0e1fafe59051de9e79cdd431863b03593ecfa8341c110affad7c8121efc/ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640", size = 764456, upload-time = "2025-09-22T19:51:11.736Z" }, ] [[package]] name = "ruff" -version = "0.12.10" -source = { registry = "https://pypi.org/simple" 
} -sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = "2025-08-21T18:22:30.925Z" }, - { url = "https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" }, - { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" }, - { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" }, - { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" }, - { url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" }, - { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" }, - { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" }, - { url = 
"https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" }, - { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" }, - { url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" }, - { url = "https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" }, - { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" }, - { url = "https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" }, - { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = 
"sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/33/c8e89216845615d14d2d42ba2bee404e7206a8db782f33400754f3799f05/ruff-0.13.1.tar.gz", hash = "sha256:88074c3849087f153d4bb22e92243ad4c1b366d7055f98726bc19aa08dc12d51", size = 5397987, upload-time = "2025-09-18T19:52:44.33Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/41/ca37e340938f45cfb8557a97a5c347e718ef34702546b174e5300dbb1f28/ruff-0.13.1-py3-none-linux_armv6l.whl", hash = "sha256:b2abff595cc3cbfa55e509d89439b5a09a6ee3c252d92020bd2de240836cf45b", size = 12304308, upload-time = "2025-09-18T19:51:56.253Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/ba378ef4129415066c3e1c80d84e539a0d52feb250685091f874804f28af/ruff-0.13.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4ee9f4249bf7f8bb3984c41bfaf6a658162cdb1b22e3103eabc7dd1dc5579334", size = 12937258, upload-time = "2025-09-18T19:52:00.184Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b6/ec5e4559ae0ad955515c176910d6d7c93edcbc0ed1a3195a41179c58431d/ruff-0.13.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c5da4af5f6418c07d75e6f3224e08147441f5d1eac2e6ce10dcce5e616a3bae", size = 12214554, upload-time = "2025-09-18T19:52:02.753Z" }, + { url = "https://files.pythonhosted.org/packages/70/d6/cb3e3b4f03b9b0c4d4d8f06126d34b3394f6b4d764912fe80a1300696ef6/ruff-0.13.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80524f84a01355a59a93cef98d804e2137639823bcee2931f5028e71134a954e", size = 12448181, upload-time = "2025-09-18T19:52:05.279Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ea/bf60cb46d7ade706a246cd3fb99e4cfe854efa3dfbe530d049c684da24ff/ruff-0.13.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ff7f5ce8d7988767dd46a148192a14d0f48d1baea733f055d9064875c7d50389", size = 12104599, upload-time = "2025-09-18T19:52:07.497Z" }, + { url = "https://files.pythonhosted.org/packages/2d/3e/05f72f4c3d3a69e65d55a13e1dd1ade76c106d8546e7e54501d31f1dc54a/ruff-0.13.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c55d84715061f8b05469cdc9a446aa6c7294cd4bd55e86a89e572dba14374f8c", size = 13791178, upload-time = "2025-09-18T19:52:10.189Z" }, + { url = "https://files.pythonhosted.org/packages/81/e7/01b1fc403dd45d6cfe600725270ecc6a8f8a48a55bc6521ad820ed3ceaf8/ruff-0.13.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ac57fed932d90fa1624c946dc67a0a3388d65a7edc7d2d8e4ca7bddaa789b3b0", size = 14814474, upload-time = "2025-09-18T19:52:12.866Z" }, + { url = "https://files.pythonhosted.org/packages/fa/92/d9e183d4ed6185a8df2ce9faa3f22e80e95b5f88d9cc3d86a6d94331da3f/ruff-0.13.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c366a71d5b4f41f86a008694f7a0d75fe409ec298685ff72dc882f882d532e36", size = 14217531, upload-time = "2025-09-18T19:52:15.245Z" }, + { url = "https://files.pythonhosted.org/packages/3b/4a/6ddb1b11d60888be224d721e01bdd2d81faaf1720592858ab8bac3600466/ruff-0.13.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4ea9d1b5ad3e7a83ee8ebb1229c33e5fe771e833d6d3dcfca7b77d95b060d38", size = 13265267, upload-time = "2025-09-18T19:52:17.649Z" }, + { url = "https://files.pythonhosted.org/packages/81/98/3f1d18a8d9ea33ef2ad508f0417fcb182c99b23258ec5e53d15db8289809/ruff-0.13.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0f70202996055b555d3d74b626406476cc692f37b13bac8828acff058c9966a", size = 13243120, upload-time = "2025-09-18T19:52:20.332Z" }, + { url = "https://files.pythonhosted.org/packages/8d/86/b6ce62ce9c12765fa6c65078d1938d2490b2b1d9273d0de384952b43c490/ruff-0.13.1-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:f8cff7a105dad631085d9505b491db33848007d6b487c3c1979dd8d9b2963783", size = 13443084, upload-time = "2025-09-18T19:52:23.032Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6e/af7943466a41338d04503fb5a81b2fd07251bd272f546622e5b1599a7976/ruff-0.13.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:9761e84255443316a258dd7dfbd9bfb59c756e52237ed42494917b2577697c6a", size = 12295105, upload-time = "2025-09-18T19:52:25.263Z" }, + { url = "https://files.pythonhosted.org/packages/3f/97/0249b9a24f0f3ebd12f007e81c87cec6d311de566885e9309fcbac5b24cc/ruff-0.13.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:3d376a88c3102ef228b102211ef4a6d13df330cb0f5ca56fdac04ccec2a99700", size = 12072284, upload-time = "2025-09-18T19:52:27.478Z" }, + { url = "https://files.pythonhosted.org/packages/f6/85/0b64693b2c99d62ae65236ef74508ba39c3febd01466ef7f354885e5050c/ruff-0.13.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cbefd60082b517a82c6ec8836989775ac05f8991715d228b3c1d86ccc7df7dae", size = 12970314, upload-time = "2025-09-18T19:52:30.212Z" }, + { url = "https://files.pythonhosted.org/packages/96/fc/342e9f28179915d28b3747b7654f932ca472afbf7090fc0c4011e802f494/ruff-0.13.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:dd16b9a5a499fe73f3c2ef09a7885cb1d97058614d601809d37c422ed1525317", size = 13422360, upload-time = "2025-09-18T19:52:32.676Z" }, + { url = "https://files.pythonhosted.org/packages/37/54/6177a0dc10bce6f43e392a2192e6018755473283d0cf43cc7e6afc182aea/ruff-0.13.1-py3-none-win32.whl", hash = "sha256:55e9efa692d7cb18580279f1fbb525146adc401f40735edf0aaeabd93099f9a0", size = 12178448, upload-time = "2025-09-18T19:52:35.545Z" }, + { url = "https://files.pythonhosted.org/packages/64/51/c6a3a33d9938007b8bdc8ca852ecc8d810a407fb513ab08e34af12dc7c24/ruff-0.13.1-py3-none-win_amd64.whl", hash = "sha256:3a3fb595287ee556de947183489f636b9f76a72f0fa9c028bdcabf5bab2cc5e5", size = 13286458, upload-time = "2025-09-18T19:52:38.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/04/afc078a12cf68592345b1e2d6ecdff837d286bac023d7a22c54c7a698c5b/ruff-0.13.1-py3-none-win_arm64.whl", hash = "sha256:c0bae9ffd92d54e03c2bf266f466da0a65e145f298ee5b5846ed435f6a00518a", size = 12437893, upload-time = "2025-09-18T19:52:41.283Z" }, ] [[package]] @@ -1365,6 +1398,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + [[package]] name = "soupsieve" version = "2.8" @@ -1628,15 +1670,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.47.3" +version = "0.48.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/b9/cc3017f9a9c9b6e27c5106cc10cc7904653c3eec0729793aec10479dd669/starlette-0.47.3.tar.gz", hash = "sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9", size = 2584144, upload-time = "2025-08-24T13:36:42.122Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/fd/901cfa59aaa5b30a99e16876f11abe38b59a1a2c51ffb3d7142bb6089069/starlette-0.47.3-py3-none-any.whl", hash = "sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51", size = 72991, upload-time = "2025-08-24T13:36:40.887Z" }, + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, ] [[package]] @@ -1677,7 +1719,7 @@ dev = [ { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-immaterial" }, - { name = "testcontainers", extra = ["redis"] }, + { name = "trio" }, ] [package.metadata] @@ -1685,7 +1727,7 @@ requires-dist = [ { name = "anyio", specifier = ">=4.10.0" }, { name = "arq", marker = "extra == 'benchmark'", git = "https://github.com/graeme22/arq" }, { name = "async-lru", marker = "extra == 'web'", specifier = ">=2.0.5" }, - { name = "coredis", specifier = ">=5.0.1" }, + { name = "coredis", git = "https://github.com/Graeme22/coredis.git?rev=anyio" }, { name = "crontab", specifier = ">=1.0.5" }, { name = "fastapi", marker = "extra == 'web'", specifier = ">=0.116.1" }, { name = "jinja2", marker = "extra == 'web'", specifier = ">=3.1.6" }, @@ -1710,7 +1752,7 @@ dev = [ { name = "ruff", specifier = ">=0.12.10" }, { name = "sphinx", specifier = ">=8.1.3" }, { 
name = "sphinx-immaterial", specifier = ">=0.13.6" }, - { name = "testcontainers", extras = ["redis"], specifier = ">=4.12.0" }, + { name = "trio", specifier = ">=0.30.0" }, ] [[package]] @@ -1764,27 +1806,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/f9/0622ce1c5100b008b34cf7719a576491c1a0f22f50f8db090d797d8de6dc/taskiq_redis-1.1.0-py3-none-any.whl", hash = "sha256:516abe3cd703a7d97a5c0979102082e295d6cf2396a43b1c572382798df221cd", size = 20095, upload-time = "2025-07-25T07:37:58.908Z" }, ] -[[package]] -name = "testcontainers" -version = "4.12.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docker" }, - { name = "python-dotenv" }, - { name = "typing-extensions" }, - { name = "urllib3" }, - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d3/62/01d9f648e9b943175e0dcddf749cf31c769665d8ba08df1e989427163f33/testcontainers-4.12.0.tar.gz", hash = "sha256:13ee89cae995e643f225665aad8b200b25c4f219944a6f9c0b03249ec3f31b8d", size = 66631, upload-time = "2025-07-21T20:32:26.37Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/e8/9e2c392e5d671afda47b917597cac8fde6a452f5776c4c9ceb93fbd2889f/testcontainers-4.12.0-py3-none-any.whl", hash = "sha256:26caef57e642d5e8c5fcc593881cf7df3ab0f0dc9170fad22765b184e226ab15", size = 111791, upload-time = "2025-07-21T20:32:25.038Z" }, -] - -[package.optional-dependencies] -redis = [ - { name = "redis" }, -] - [[package]] name = "tomli" version = "2.2.1" @@ -1824,9 +1845,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] +[[package]] +name = "trio" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cffi", marker = 
"implementation_name != 'pypy' and os_name == 'nt'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "outcome" }, + { name = "sniffio" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/8f/c6e36dd11201e2a565977d8b13f0b027ba4593c1a80bed5185489178e257/trio-0.31.0.tar.gz", hash = "sha256:f71d551ccaa79d0cb73017a33ef3264fde8335728eb4c6391451fe5d253a9d5b", size = 605825, upload-time = "2025-09-09T15:17:15.242Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/5b/94237a3485620dbff9741df02ff6d8acaa5fdec67d81ab3f62e4d8511bf7/trio-0.31.0-py3-none-any.whl", hash = "sha256:b5d14cd6293d79298b49c3485ffd9c07e3ce03a6da8c7dfbe0cb3dd7dc9a4774", size = 512679, upload-time = "2025-09-09T15:17:13.821Z" }, +] + [[package]] name = "typer" -version = "0.16.1" +version = "0.19.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -1834,9 +1873,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/78/d90f616bf5f88f8710ad067c1f8705bf7618059836ca084e5bb2a0855d75/typer-0.16.1.tar.gz", hash = "sha256:d358c65a464a7a90f338e3bb7ff0c74ac081449e53884b12ba658cbd72990614", size = 102836, upload-time = "2025-08-18T19:18:22.898Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/76/06dbe78f39b2203d2a47d5facc5df5102d0561e2807396471b5f7c5a30a1/typer-0.16.1-py3-none-any.whl", hash = "sha256:90ee01cb02d9b8395ae21ee3368421faf21fa138cb2a541ed369c08cec5237c9", size = 46397, upload-time = "2025-08-18T19:18:21.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, ] [[package]] @@ -1871,16 +1910,16 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" +version = "0.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, ] [[package]]