diff --git a/README.md b/README.md index bf9a4eee..b5af775b 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,45 @@ TrainerClient().wait_for_job_status(job_id) print("\n".join(TrainerClient().get_job_logs(name=job_id))) ``` +## Local Development + +Kubeflow SDK provides first-class support for local development, allowing you to test and iterate on your models without needing a Kubernetes cluster. + +### Execution Backends + +Choose the backend that fits your development workflow: + +| Backend | Description | Use Case | +|---------|-------------|----------| +| **KubernetesBackend** | Run jobs on Kubernetes cluster | Production, multi-node distributed training | +| **ContainerBackend** | Auto-detects Docker or Podman | Local development with container isolation | +| **LocalProcessBackend** | Run as local Python subprocesses | Quick prototyping, debugging | + +### Local Container Execution + +The **ContainerBackend** automatically detects and uses either Docker or Podman: + +```bash +# Install with Docker support +pip install kubeflow[docker] + +# Or install with Podman support +pip install kubeflow[podman] +``` + +```python +from kubeflow.trainer import TrainerClient, ContainerBackendConfig, CustomTrainer + +# Auto-detects Docker or Podman +config = ContainerBackendConfig() +client = TrainerClient(backend_config=config) + +# Your training runs in isolated containers +job_id = client.train(trainer=CustomTrainer(func=train_fn)) +``` + +For detailed configuration options and platform-specific setup (macOS, Linux), see the [ContainerBackend documentation](kubeflow/trainer/backends/container/README.md). + ## Supported Kubeflow Projects | Project | Status | Version Support | Description | diff --git a/kubeflow/trainer/__init__.py b/kubeflow/trainer/__init__.py index 61a46b26..5bdfcfb2 100644 --- a/kubeflow/trainer/__init__.py +++ b/kubeflow/trainer/__init__.py @@ -15,6 +15,7 @@ # Import the Kubeflow Trainer client. 
from kubeflow.trainer.api.trainer_client import TrainerClient # noqa: F401 +from kubeflow.trainer.backends.container.types import ContainerBackendConfig # import backends and its associated configs from kubeflow.trainer.backends.kubernetes.types import KubernetesBackendConfig @@ -58,5 +59,6 @@ "TrainerClient", "TrainerType", "LocalProcessBackendConfig", + "ContainerBackendConfig", "KubernetesBackendConfig", ] diff --git a/kubeflow/trainer/api/trainer_client.py b/kubeflow/trainer/api/trainer_client.py index 6b564c90..553f585e 100644 --- a/kubeflow/trainer/api/trainer_client.py +++ b/kubeflow/trainer/api/trainer_client.py @@ -16,6 +16,8 @@ import logging from typing import Optional, Union +from kubeflow.trainer.backends.container.backend import ContainerBackend +from kubeflow.trainer.backends.container.types import ContainerBackendConfig from kubeflow.trainer.backends.kubernetes.backend import KubernetesBackend from kubeflow.trainer.backends.kubernetes.types import KubernetesBackendConfig from kubeflow.trainer.backends.localprocess.backend import ( @@ -31,14 +33,19 @@ class TrainerClient: def __init__( self, - backend_config: Union[KubernetesBackendConfig, LocalProcessBackendConfig] = None, + backend_config: Union[ + KubernetesBackendConfig, + LocalProcessBackendConfig, + ContainerBackendConfig, + ] = None, ): """Initialize a Kubeflow Trainer client. Args: - backend_config: Backend configuration. Either KubernetesBackendConfig or - LocalProcessBackendConfig, or None to use the backend's - default config class. Defaults to KubernetesBackendConfig. + backend_config: Backend configuration. Either KubernetesBackendConfig, + LocalProcessBackendConfig, ContainerBackendConfig, + or None to use the backend's default config class. + Defaults to KubernetesBackendConfig. Raises: ValueError: Invalid backend configuration. 
@@ -52,6 +59,8 @@ def __init__( self.backend = KubernetesBackend(backend_config) elif isinstance(backend_config, LocalProcessBackendConfig): self.backend = LocalProcessBackend(backend_config) + elif isinstance(backend_config, ContainerBackendConfig): + self.backend = ContainerBackend(backend_config) else: raise ValueError(f"Invalid backend config '{backend_config}'") diff --git a/kubeflow/trainer/backends/container/README.md b/kubeflow/trainer/backends/container/README.md new file mode 100644 index 00000000..db423466 --- /dev/null +++ b/kubeflow/trainer/backends/container/README.md @@ -0,0 +1,162 @@ +# ContainerBackend + +The unified container backend for Kubeflow Trainer that automatically detects and uses either Docker or Podman. + +## Overview + +This backend provides a single, unified interface for container-based training execution, automatically detecting which container runtime is available on your system. + +The implementation uses the **adapter pattern** to abstract away differences between Docker and Podman APIs, providing clean separation between runtime detection logic and container operations. 
+ +## Usage + +### Basic usage (auto-detection) + +```python +from kubeflow.trainer import TrainerClient, ContainerBackendConfig + +# Auto-detects Docker or Podman +config = ContainerBackendConfig() +client = TrainerClient(backend_config=config) +``` + +### Force specific runtime + +```python +# Force Docker +config = ContainerBackendConfig(runtime="docker") +client = TrainerClient(backend_config=config) + +# Force Podman +config = ContainerBackendConfig(runtime="podman") +client = TrainerClient(backend_config=config) +``` + +### Configuration options + +```python +config = ContainerBackendConfig( + # Optional: force specific runtime ("docker" or "podman") + runtime=None, + + # Optional: explicit image override + image="my-custom-image:latest", + + # Image pull policy: "IfNotPresent", "Always", or "Never" + pull_policy="IfNotPresent", + + # Auto-remove containers and networks on job deletion + auto_remove=True, + + # GPU support (varies by runtime) + gpus=None, + + # Environment variables for all containers + env={"MY_VAR": "value"}, + + # Container daemon URL override (required for Colima/Podman Machine on macOS) + container_host=None, + + # Base directory for job workspaces + workdir_base=None, +) +``` + +### macOS-specific configuration + +On macOS, you may need to specify `container_host` depending on your container runtime: + +**Docker with Colima:** +```python +import os +config = ContainerBackendConfig( + container_host=f"unix://{os.path.expanduser('~')}/.colima/default/docker.sock" +) +``` + +**Podman Machine:** +```python +import os +config = ContainerBackendConfig( + container_host=f"unix://{os.path.expanduser('~')}/.local/share/containers/podman/machine/podman.sock" +) +``` + +**Docker Desktop:** +```python +# Usually works without specifying container_host +config = ContainerBackendConfig() +``` + +Alternatively, set environment variables before running: +```bash +# For Colima +export DOCKER_HOST="unix://$HOME/.colima/default/docker.sock" + +# For 
Podman Machine +export CONTAINER_HOST="unix://$HOME/.local/share/containers/podman/machine/podman.sock" +``` + +### How it works + +The backend initialization follows this logic: + +1. If `runtime` is specified in config, use that runtime exclusively +2. Otherwise, try to initialize Docker client adapter +3. If Docker fails, try to initialize Podman client adapter +4. If both fail, raise a RuntimeError + +If you don't have Docker or Podman installed, use `LocalProcessBackendConfig` instead, which runs training as local subprocesses. + +All container operations are delegated to the adapter, eliminating code duplication. + +## Installation + +Install with Docker support: +```bash +pip install kubeflow[docker] +``` + +Install with Podman support: +```bash +pip install kubeflow[podman] +``` + +Install with both: +```bash +pip install kubeflow[docker,podman] +``` + +## Example: Training Job + +```python +from kubeflow.trainer import TrainerClient, ContainerBackendConfig, CustomTrainer + +# Define your training function +def train(): + import torch + print(f"Training with PyTorch {torch.__version__}") + # Your training code here + +# Create trainer +trainer = CustomTrainer( + func=train, + packages_to_install=["torch"], +) + +# Initialize client (auto-detects runtime) +config = ContainerBackendConfig() +client = TrainerClient(backend_config=config) + +# Run training +job_name = client.train(trainer=trainer) +print(f"Training job started: {job_name}") + +# Get logs +for log in client.get_job_logs(job_name, follow=True): + print(log, end='') +``` + +## See also + +- [Example notebook](TBA) - Complete working example to be added diff --git a/kubeflow/trainer/backends/container/__init__.py b/kubeflow/trainer/backends/container/__init__.py new file mode 100644 index 00000000..776237c3 --- /dev/null +++ b/kubeflow/trainer/backends/container/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2025 The Kubeflow Authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from kubeflow.trainer.backends.container.backend import ContainerBackend +from kubeflow.trainer.backends.container.types import ContainerBackendConfig + +__all__ = ["ContainerBackend", "ContainerBackendConfig"] diff --git a/kubeflow/trainer/backends/container/backend.py b/kubeflow/trainer/backends/container/backend.py new file mode 100644 index 00000000..eb8b989b --- /dev/null +++ b/kubeflow/trainer/backends/container/backend.py @@ -0,0 +1,627 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +ContainerBackend +---------------- + +Unified local execution backend for `CustomTrainer` jobs using containers. + +This backend automatically detects and uses either Docker or Podman. +It provides a single interface regardless of the underlying container runtime. + +Key behaviors: +- Auto-detection: Tries Docker first, then Podman. Can be overridden via config. 
+- Multi-node jobs: one container per node connected via a per-job network. +- Entry script generation: we serialize the user's training function to a small + Python file and invoke it inside the container using `torchrun` (preferred) or + `python` as a fallback. +- Runtimes: we use `config/local_runtimes` to define runtime images and + characteristics (e.g., torch). Defaults to `torch-distributed` if no runtime + is provided. +- Image pulling: controlled via `pull_policy` and performed automatically if + needed. +- Logs and lifecycle: streaming logs and deletion semantics similar to the + Docker/Podman backends, but with automatic runtime detection. +""" + +from __future__ import annotations + +from collections.abc import Iterator +from dataclasses import dataclass +from datetime import datetime +import logging +import os +from pathlib import Path +import random +import shutil +import string +import uuid + +from kubeflow.trainer.backends.base import ExecutionBackend +from kubeflow.trainer.backends.container.client_adapter import ( + ContainerClientAdapter, + DockerClientAdapter, + PodmanClientAdapter, +) +from kubeflow.trainer.backends.container.runtime_loader import ( + LOCAL_RUNTIMES_DIR, + get_local_runtime, + list_local_runtimes, +) +from kubeflow.trainer.backends.container.types import ContainerBackendConfig +from kubeflow.trainer.constants import constants +from kubeflow.trainer.types import types + +logger = logging.getLogger(__name__) + + +@dataclass +class _Node: + name: str + container_id: str + status: str = constants.TRAINJOB_CREATED + + +@dataclass +class _Job: + name: str + created: datetime + runtime: types.Runtime + network_id: str + nodes: list[_Node] + workdir_host: str + + +class ContainerBackend(ExecutionBackend): + """ + Unified container backend that auto-detects Docker or Podman. + + This backend uses the adapter pattern to abstract away differences between + Docker and Podman, providing a single consistent interface. 
+ """ + + def __init__(self, cfg: ContainerBackendConfig): + self.cfg = cfg + self._jobs: dict[str, _Job] = {} + self.label_prefix = "trainer.kubeflow.org" + + # Initialize the container client adapter + self._adapter = self._create_adapter() + + def _create_adapter(self) -> ContainerClientAdapter: + """ + Create the appropriate container client adapter. + + Tries Docker first, then Podman if Docker fails, unless a specific + runtime is requested in the config. + + Raises RuntimeError if neither Docker nor Podman are available. + """ + if self.cfg.runtime: + # User specified a runtime explicitly + if self.cfg.runtime == "docker": + adapter = DockerClientAdapter(self.cfg.container_host) + adapter.ping() + logger.info("Using Docker as container runtime") + return adapter + elif self.cfg.runtime == "podman": + adapter = PodmanClientAdapter(self.cfg.container_host) + adapter.ping() + logger.info("Using Podman as container runtime") + return adapter + else: + # Auto-detect: try Docker first, then Podman + try: + adapter = DockerClientAdapter(self.cfg.container_host) + adapter.ping() + logger.info("Using Docker as container runtime") + return adapter + except Exception as docker_error: + logger.debug(f"Docker initialization failed: {docker_error}") + try: + adapter = PodmanClientAdapter(self.cfg.container_host) + adapter.ping() + logger.info("Using Podman as container runtime") + return adapter + except Exception as podman_error: + logger.debug(f"Podman initialization failed: {podman_error}") + raise RuntimeError( + "Neither Docker nor Podman is available. " + "Please install Docker or Podman, or use LocalProcessBackendConfig instead." 
+ ) from podman_error + + @property + def _runtime_type(self) -> str: + """Get the runtime type for debugging/logging.""" + return self._adapter._runtime_type + + # ---- Runtime APIs ---- + def list_runtimes(self) -> list[types.Runtime]: + return list_local_runtimes() + + def get_runtime(self, name: str) -> types.Runtime: + return get_local_runtime(name) + + def get_runtime_packages(self, runtime: types.Runtime): + """ + Spawn a short-lived container to report Python version, pip list, and nvidia-smi. + """ + image = self._resolve_image(runtime) + self._maybe_pull_image(image) + + command = [ + "bash", + "-lc", + "python -c \"import sys; print(f'Python: {sys.version}')\" && " + "(pip list || echo 'pip not found') && " + "(nvidia-smi || echo 'nvidia-smi not found')", + ] + + logs = self._adapter.run_oneoff_container(image=image, command=command) + print(logs) + + def train( + self, + runtime: types.Runtime | None = None, + initializer: types.Initializer | None = None, + trainer: types.CustomTrainer | types.BuiltinTrainer | None = None, + ) -> str: + if runtime is None: + runtime = self.get_runtime("torch-distributed") + + if not isinstance(trainer, types.CustomTrainer): + raise ValueError(f"{self.__class__.__name__} supports only CustomTrainer in v1") + + # Generate job name + job_name = random.choice(string.ascii_lowercase) + uuid.uuid4().hex[:11] + logger.info(f"Starting training job: {job_name}") + + try: + # Create per-job working directory on host + workdir = self._create_workdir(job_name) + logger.debug(f"Created working directory: {workdir}") + + _ = self._write_training_script(workdir, trainer) + logger.debug(f"Wrote training script to {workdir}/train.py") + + # Resolve image and pull if needed + image = self._resolve_image(runtime) + logger.debug(f"Using image: {image}") + + self._maybe_pull_image(image) + logger.debug(f"Image ready: {image}") + + # Build base environment + env = self._build_environment(trainer) + + # Construct pre-run command to install 
packages + pre_install_cmd = self._build_pip_install_cmd(trainer) + + # Create network for multi-node communication + num_nodes = trainer.num_nodes or runtime.trainer.num_nodes or 1 + logger.debug(f"Creating network for {num_nodes} nodes") + + network_id = self._adapter.create_network( + name=f"{job_name}-net", + labels={f"{self.label_prefix}/trainjob-name": job_name}, + ) + logger.info(f"Created network: {network_id}") + + # Create N containers (one per node) + containers: list[_Node] = [] + master_container_id = None + master_ip = None + + for rank in range(num_nodes): + container_name = f"{job_name}-node-{rank}" + + # Get master address and port for torchrun + master_port = 29500 + + # For Podman: use IP address to avoid DNS timing issues + # For Docker: use hostname (DNS is reliable) + if rank == 0: + # Master node - will be created first + master_addr = f"{job_name}-node-0" + else: + # Worker nodes - determine master address based on runtime + if self._runtime_type == "podman" and master_ip: + master_addr = master_ip + logger.debug(f"Using master IP address for Podman: {master_ip}") + else: + master_addr = f"{job_name}-node-0" + logger.debug(f"Using master hostname: {master_addr}") + + # Prefer torchrun; fall back to python if torchrun is unavailable + # For worker nodes, wait for master to be reachable before starting torchrun + wait_for_master = "" + if rank > 0: + wait_for_master = ( + f"echo 'Waiting for master node {master_addr}:{master_port}...'; " + f"for i in {{1..60}}; do " + f" if timeout 1 bash -c 'cat < /dev/null > " + f"/dev/tcp/{master_addr}/{master_port}' 2>/dev/null; then " + f" echo 'Master node is reachable'; break; " + f" fi; " + f" if [ $i -eq 60 ]; then " + f"echo 'Timeout waiting for master node'; exit 1; fi; " + f" sleep 2; " + f"done; " + ) + + entry_cmd = ( + f"{pre_install_cmd}" + f"{wait_for_master}" + "if command -v torchrun >/dev/null 2>&1; then " + f" torchrun --nproc_per_node=1 --nnodes={num_nodes} " + f" --node-rank={rank} 
--rdzv-backend=c10d " + f" --rdzv-endpoint={master_addr}:{master_port} " + f" /workspace/train.py; " + "else " + f" python /workspace/train.py; " + "fi" + ) + + full_cmd = ["bash", "-lc", entry_cmd] + + labels = { + f"{self.label_prefix}/trainjob-name": job_name, + f"{self.label_prefix}/step": f"node-{rank}", + } + + volumes = { + workdir: { + "bind": "/workspace", + "mode": "rw", + } + } + + logger.debug(f"Creating container {rank}/{num_nodes}: {container_name}") + + container_id = self._adapter.create_and_start_container( + image=image, + command=full_cmd, + name=container_name, + network_id=network_id, + environment=env, + labels=labels, + volumes=volumes, + working_dir="/workspace", + ) + + logger.info(f"Started container {container_name} (ID: {container_id[:12]})") + containers.append(_Node(name=container_name, container_id=container_id)) + + # If this is the master node and we're using Podman, get its IP address + if rank == 0: + master_container_id = container_id + if self._runtime_type == "podman": + # Get master IP for worker nodes to use + master_ip = self._adapter.get_container_ip(master_container_id, network_id) + if master_ip: + logger.info(f"Master node IP address: {master_ip}") + else: + logger.warning( + "Could not retrieve master IP address. " + "Worker nodes will fall back to DNS resolution." 
+ ) + + # Store job in backend + self._jobs[job_name] = _Job( + name=job_name, + created=datetime.now(), + runtime=runtime, + network_id=network_id, + nodes=containers, + workdir_host=workdir, + ) + + logger.info( + f"Training job {job_name} created successfully with {len(containers)} container(s)" + ) + return job_name + + except Exception as e: + # Clean up on failure + logger.error(f"Failed to create training job {job_name}: {e}") + logger.exception("Full traceback:") + + # Try to clean up any resources that were created + from contextlib import suppress + + try: + # Stop and remove any containers that were created + if "containers" in locals(): + for node in containers: + with suppress(Exception): + self._adapter.stop_container(node.container_id, timeout=5) + self._adapter.remove_container(node.container_id, force=True) + + # Remove network if it was created + if "network_id" in locals(): + with suppress(Exception): + self._adapter.delete_network(network_id) + + # Remove working directory if it was created + if "workdir" in locals() and os.path.isdir(workdir): + shutil.rmtree(workdir, ignore_errors=True) + + except Exception as cleanup_error: + logger.error(f"Error during cleanup: {cleanup_error}") + + # Re-raise the original exception + raise + + def list_jobs(self, runtime: types.Runtime | None = None) -> list[types.TrainJob]: + result: list[types.TrainJob] = [] + for job in self._jobs.values(): + if runtime and job.runtime.name != runtime.name: + continue + steps = [] + for node in job.nodes: + steps.append( + types.Step( + name=node.name.split(f"{job.name}-")[-1], + pod_name=node.name, + status=self._container_status(node.container_id), + ) + ) + result.append( + types.TrainJob( + name=job.name, + creation_timestamp=job.created, + runtime=job.runtime, + steps=steps, + num_nodes=len(job.nodes), + status=self._aggregate_status(job), + ) + ) + return result + + def get_job(self, name: str) -> types.TrainJob: + job = self._jobs.get(name) + if not job: + raise 
ValueError(f"No TrainJob with name {name}") + # Refresh container statuses on demand + steps: list[types.Step] = [] + for node in job.nodes: + status = self._container_status(node.container_id) + steps.append( + types.Step( + name=node.name.split(f"{job.name}-")[-1], + pod_name=node.name, + status=status, + ) + ) + return types.TrainJob( + name=job.name, + creation_timestamp=job.created, + runtime=job.runtime, + steps=steps, + num_nodes=len(job.nodes), + status=self._aggregate_status(job), + ) + + def get_job_logs( + self, + name: str, + follow: bool = False, + step: str = constants.NODE + "-0", + ) -> Iterator[str]: + job = self._jobs.get(name) + if not job: + raise ValueError(f"No TrainJob with name {name}") + + want_all = step == constants.NODE + "-0" + for node in job.nodes: + node_step = node.name.split(f"{job.name}-")[-1] + if not want_all and node_step != step: + continue + try: + yield from self._adapter.container_logs(node.container_id, follow) + except Exception as e: + logger.warning(f"Failed to get logs for {node.name}: {e}") + yield f"Error getting logs: {e}\n" + + def wait_for_job_status( + self, + name: str, + status: set[str] = {constants.TRAINJOB_COMPLETE}, + timeout: int = 600, + polling_interval: int = 2, + ) -> types.TrainJob: + import time + + end = time.time() + timeout + while time.time() < end: + tj = self.get_job(name) + logger.debug(f"TrainJob {name}, status {tj.status}") + if tj.status in status: + return tj + if constants.TRAINJOB_FAILED not in status and tj.status == constants.TRAINJOB_FAILED: + raise RuntimeError(f"TrainJob {name} is Failed") + time.sleep(polling_interval) + raise TimeoutError(f"Timeout waiting for TrainJob {name} to reach status: {status}") + + def delete_job(self, name: str): + job = self._jobs.get(name) + if not job: + raise ValueError(f"No TrainJob with name {name}") + + # Stop containers and remove + from contextlib import suppress + + for node in job.nodes: + with suppress(Exception): + 
self._adapter.stop_container(node.container_id, timeout=10) + with suppress(Exception): + self._adapter.remove_container(node.container_id, force=True) + + # Remove network (best-effort) + with suppress(Exception): + self._adapter.delete_network(job.network_id) + + # Remove working directory if configured + if self.cfg.auto_remove and os.path.isdir(job.workdir_host): + shutil.rmtree(job.workdir_host, ignore_errors=True) + + del self._jobs[name] + + # Helper methods + + def _create_workdir(self, job_name: str) -> str: + """Create per-job working directory on host.""" + workdir_base = self.cfg.workdir_base + if workdir_base: + base = Path(workdir_base) + base.mkdir(parents=True, exist_ok=True) + workdir = str((base / f"{job_name}").resolve()) + os.makedirs(workdir, exist_ok=True) + else: + backend_name = ( + self.__class__.__name__.lower().replace("local", "").replace("backend", "") + ) + home_base = Path.home() / ".kubeflow_trainer" / f"local{backend_name}" + home_base.mkdir(parents=True, exist_ok=True) + workdir = str((home_base / f"{job_name}").resolve()) + os.makedirs(workdir, exist_ok=True) + return workdir + + def _write_training_script(self, workdir: str, trainer: types.CustomTrainer) -> Path: + """Write the training script to the working directory.""" + script_path = Path(workdir) / "train.py" + import inspect + import textwrap + + code = inspect.getsource(trainer.func) + code = textwrap.dedent(code) + if trainer.func_args is None: + code += f"\n{trainer.func.__name__}()\n" + else: + code += f"\n{trainer.func.__name__}({trainer.func_args})\n" + script_path.write_text(code) + return script_path + + def _build_environment(self, trainer: types.CustomTrainer) -> dict[str, str]: + """Build environment variables for containers.""" + env = dict(self.cfg.env or {}) + if trainer.env: + env.update(trainer.env) + return env + + def _build_pip_install_cmd(self, trainer: types.CustomTrainer) -> str: + """Build pip install command for packages.""" + pkgs = 
trainer.packages_to_install or [] + if not pkgs: + return "" + + index_urls = trainer.pip_index_urls or list(constants.DEFAULT_PIP_INDEX_URLS) + main_idx = index_urls[0] + extras = " ".join(f"--extra-index-url {u}" for u in index_urls[1:]) + quoted = " ".join(f'"{p}"' for p in pkgs) + return ( + "PIP_DISABLE_PIP_VERSION_CHECK=1 pip install --no-warn-script-location " + f"--index-url {main_idx} {extras} {quoted} && " + ) + + def _maybe_pull_image(self, image: str): + """Pull image based on pull policy.""" + policy = (self.cfg.pull_policy or "IfNotPresent").lower() + try: + if policy == "never": + if not self._adapter.image_exists(image): + raise RuntimeError( + f"Image '{image}' not found locally and pull policy is Never" + ) + return + if policy == "always": + logger.debug(f"Pulling image (Always): {image}") + self._adapter.pull_image(image) + return + # IfNotPresent + if not self._adapter.image_exists(image): + logger.debug(f"Pulling image (IfNotPresent): {image}") + self._adapter.pull_image(image) + except Exception as e: + raise RuntimeError(f"Failed to ensure image '{image}': {e}") from e + + def _container_status(self, container_id: str) -> str: + """Get the status of a container.""" + try: + status, exit_code = self._adapter.container_status(container_id) + if status == "running": + return constants.TRAINJOB_RUNNING + if status == "created": + return constants.TRAINJOB_CREATED + if status == "exited": + # Exit code 0 -> complete, else failed + return constants.TRAINJOB_COMPLETE if exit_code == 0 else constants.TRAINJOB_FAILED + except Exception: + return constants.UNKNOWN + return constants.UNKNOWN + + def _aggregate_status(self, job: _Job) -> str: + """Aggregate status from all containers in a job.""" + statuses = [self._container_status(n.container_id) for n in job.nodes] + if constants.TRAINJOB_FAILED in statuses: + return constants.TRAINJOB_FAILED + if constants.TRAINJOB_RUNNING in statuses: + return constants.TRAINJOB_RUNNING + if all(s == 
constants.TRAINJOB_COMPLETE for s in statuses if s != constants.UNKNOWN): + return constants.TRAINJOB_COMPLETE + if any(s == constants.TRAINJOB_CREATED for s in statuses): + return constants.TRAINJOB_CREATED + return constants.UNKNOWN + + def _resolve_image(self, runtime: types.Runtime) -> str: + """Resolve the container image for a runtime.""" + if self.cfg.image: + return self.cfg.image + + import yaml + + for f in sorted(LOCAL_RUNTIMES_DIR.glob("*.yaml")): + try: + data = yaml.safe_load(Path(f).read_text()) + if ( + data.get("kind") in {"ClusterTrainingRuntime", "TrainingRuntime"} + and data.get("metadata", {}).get("name") == runtime.name + ): + replicated = ( + data.get("spec", {}) + .get("template", {}) + .get("spec", {}) + .get("replicatedJobs", []) + ) + node_jobs = [j for j in replicated if j.get("name") == "node"] + if node_jobs: + node_spec = ( + node_jobs[0] + .get("template", {}) + .get("spec", {}) + .get("template", {}) + .get("spec", {}) + ) + containers = node_spec.get("containers", []) + if containers and containers[0].get("image"): + return str(containers[0]["image"]) + except Exception: + continue + + raise ValueError( + f"No image specified for runtime '{runtime.name}'. " + f"Provide ContainerBackendConfig.image or add an 'image' field " + f"to its YAML in {LOCAL_RUNTIMES_DIR}." + ) diff --git a/kubeflow/trainer/backends/container/backend_test.py b/kubeflow/trainer/backends/container/backend_test.py new file mode 100644 index 00000000..dbcce3d7 --- /dev/null +++ b/kubeflow/trainer/backends/container/backend_test.py @@ -0,0 +1,667 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for ContainerBackend. + +Tests the ContainerBackend class with mocked container adapters. +""" + +from collections.abc import Iterator +import os +from pathlib import Path +import shutil +import tempfile +from unittest.mock import Mock, patch + +import pytest + +from kubeflow.trainer.backends.container.backend import ContainerBackend +from kubeflow.trainer.backends.container.client_adapter import ContainerClientAdapter +from kubeflow.trainer.backends.container.types import ContainerBackendConfig +from kubeflow.trainer.constants import constants +from kubeflow.trainer.test.common import FAILED, SUCCESS, TestCase +from kubeflow.trainer.types import types + + +# Mock Container Adapter +class MockContainerAdapter(ContainerClientAdapter): + """Mock adapter for testing ContainerBackend without Docker/Podman.""" + + def __init__(self): + self._runtime_type = "mock" + self.networks_created = [] + self.containers_created = [] + self.containers_stopped = [] + self.containers_removed = [] + self.networks_deleted = [] + self.images_pulled = [] + self.ping_called = False + + def ping(self): + self.ping_called = True + + def create_network(self, name: str, labels: dict[str, str]) -> str: + network_id = f"net-{name}" + self.networks_created.append({"id": network_id, "name": name, "labels": labels}) + return network_id + + def delete_network(self, network_id: str): + self.networks_deleted.append(network_id) + + def create_and_start_container( + self, + image: str, + command: list[str], + name: str, + network_id: str, + environment: dict[str, str], + 
labels: dict[str, str], + volumes: dict[str, dict[str, str]], + working_dir: str, + ) -> str: + container_id = f"container-{len(self.containers_created)}" + self.containers_created.append( + { + "id": container_id, + "name": name, + "image": image, + "command": command, + "network": network_id, + "environment": environment, + "labels": labels, + "volumes": volumes, + "working_dir": working_dir, + "status": "running", + "exit_code": None, + } + ) + return container_id + + def get_container(self, container_id: str): + for container in self.containers_created: + if container["id"] == container_id: + return Mock(id=container_id, status=container["status"]) + return None + + def container_logs(self, container_id: str, follow: bool) -> Iterator[str]: + if follow: + yield f"Log line 1 from {container_id}\n" + yield f"Log line 2 from {container_id}\n" + else: + yield f"Complete log from {container_id}\n" + + def stop_container(self, container_id: str, timeout: int = 10): + self.containers_stopped.append(container_id) + for container in self.containers_created: + if container["id"] == container_id: + container["status"] = "exited" + container["exit_code"] = 0 + + def remove_container(self, container_id: str, force: bool = True): + self.containers_removed.append(container_id) + + def pull_image(self, image: str): + self.images_pulled.append(image) + + def image_exists(self, image: str) -> bool: + return "local" in image or image in self.images_pulled + + def run_oneoff_container(self, image: str, command: list[str]) -> str: + return "Python 3.9.0\npip 21.0.1\nnvidia-smi not found\n" + + def container_status(self, container_id: str) -> tuple[str, int | None]: + for container in self.containers_created: + if container["id"] == container_id: + return (container["status"], container.get("exit_code")) + return ("unknown", None) + + def set_container_status(self, container_id: str, status: str, exit_code: int | None = None): + """Helper method to set container status for 
testing.""" + for container in self.containers_created: + if container["id"] == container_id: + container["status"] = status + container["exit_code"] = exit_code + + +# Fixtures +@pytest.fixture +def container_backend(): + """Provide ContainerBackend with mocked adapter.""" + backend = ContainerBackend(ContainerBackendConfig()) + backend._adapter = MockContainerAdapter() + return backend + + +@pytest.fixture +def temp_workdir(): + """Provide a temporary working directory.""" + tmpdir = tempfile.mkdtemp() + yield tmpdir + if os.path.exists(tmpdir): + shutil.rmtree(tmpdir) + + +# Helper Function +def simple_train_func(): + """Simple training function for tests.""" + print("Training") + + +# Tests +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="auto-detect docker first", + expected_status=SUCCESS, + ), + TestCase( + name="auto-detect falls back to podman", + expected_status=SUCCESS, + ), + TestCase( + name="both unavailable raises error", + expected_status=FAILED, + expected_error=RuntimeError, + ), + ], +) +def test_backend_initialization(test_case): + """Test ContainerBackend initialization and adapter creation.""" + print("Executing test:", test_case.name) + try: + if test_case.name == "auto-detect docker first": + with ( + patch( + "kubeflow.trainer.backends.container.backend.DockerClientAdapter" + ) as mock_docker, + patch( + "kubeflow.trainer.backends.container.backend.PodmanClientAdapter" + ) as mock_podman, + ): + mock_docker_instance = Mock() + mock_docker.return_value = mock_docker_instance + + _ = ContainerBackend(ContainerBackendConfig()) + + mock_docker.assert_called_once_with(None) + mock_docker_instance.ping.assert_called_once() + mock_podman.assert_not_called() + assert test_case.expected_status == SUCCESS + + elif test_case.name == "auto-detect falls back to podman": + with ( + patch( + "kubeflow.trainer.backends.container.backend.DockerClientAdapter" + ) as mock_docker, + patch( + 
"kubeflow.trainer.backends.container.backend.PodmanClientAdapter" + ) as mock_podman, + ): + mock_docker_instance = Mock() + mock_docker_instance.ping.side_effect = Exception("Docker not available") + mock_docker.return_value = mock_docker_instance + + mock_podman_instance = Mock() + mock_podman.return_value = mock_podman_instance + + _ = ContainerBackend(ContainerBackendConfig()) + + mock_docker.assert_called_once() + mock_podman.assert_called_once_with(None) + mock_podman_instance.ping.assert_called_once() + assert test_case.expected_status == SUCCESS + + elif test_case.name == "both unavailable raises error": + with ( + patch( + "kubeflow.trainer.backends.container.backend.DockerClientAdapter" + ) as mock_docker, + patch( + "kubeflow.trainer.backends.container.backend.PodmanClientAdapter" + ) as mock_podman, + ): + mock_docker_instance = Mock() + mock_docker_instance.ping.side_effect = Exception("Docker not available") + mock_docker.return_value = mock_docker_instance + + mock_podman_instance = Mock() + mock_podman_instance.ping.side_effect = Exception("Podman not available") + mock_podman.return_value = mock_podman_instance + + ContainerBackend(ContainerBackendConfig()) + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +def test_list_runtimes(container_backend): + """Test listing available local runtimes.""" + print("Executing test: list_runtimes") + runtimes = container_backend.list_runtimes() + + assert isinstance(runtimes, list) + assert len(runtimes) > 0 + runtime_names = [r.name for r in runtimes] + assert "torch-distributed" in runtime_names + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="get valid runtime", + expected_status=SUCCESS, + config={"name": "torch-distributed"}, + ), + TestCase( + name="get invalid runtime", + expected_status=FAILED, + config={"name": "nonexistent-runtime"}, + expected_error=ValueError, + ), + ], +) +def 
test_get_runtime(container_backend, test_case): + """Test getting a specific runtime.""" + print("Executing test:", test_case.name) + try: + runtime = container_backend.get_runtime(**test_case.config) + + assert test_case.expected_status == SUCCESS + assert isinstance(runtime, types.Runtime) + assert runtime.name == test_case.config["name"] + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +def test_get_runtime_packages(container_backend): + """Test getting runtime packages.""" + print("Executing test: get_runtime_packages") + runtime = container_backend.get_runtime("torch-distributed") + container_backend.get_runtime_packages(runtime) + + assert len( + container_backend._adapter.images_pulled + ) > 0 or container_backend._adapter.image_exists(runtime.trainer.image) + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="train single node", + expected_status=SUCCESS, + config={"num_nodes": 1, "expected_containers": 1}, + ), + TestCase( + name="train multi-node", + expected_status=SUCCESS, + config={"num_nodes": 3, "expected_containers": 3}, + ), + TestCase( + name="train with custom env", + expected_status=SUCCESS, + config={ + "num_nodes": 1, + "env": {"MY_VAR": "my_value", "DEBUG": "true"}, + "expected_containers": 1, + }, + ), + TestCase( + name="train with packages", + expected_status=SUCCESS, + config={ + "num_nodes": 1, + "packages": ["numpy", "pandas"], + "expected_containers": 1, + }, + ), + ], +) +def test_train(container_backend, test_case): + """Test training job creation.""" + print("Executing test:", test_case.name) + try: + trainer = types.CustomTrainer( + func=simple_train_func, + num_nodes=test_case.config.get("num_nodes", 1), + env=test_case.config.get("env"), + packages_to_install=test_case.config.get("packages"), + ) + runtime = container_backend.get_runtime("torch-distributed") + + job_name = container_backend.train(runtime=runtime, 
trainer=trainer) + + assert test_case.expected_status == SUCCESS + assert job_name is not None + assert len(job_name) == 12 + assert ( + len(container_backend._adapter.containers_created) + == test_case.config["expected_containers"] + ) + assert len(container_backend._adapter.networks_created) == 1 + + # Check environment if specified + if "env" in test_case.config: + container = container_backend._adapter.containers_created[0] + for key, value in test_case.config["env"].items(): + assert container["environment"][key] == value + + # Check packages if specified + if "packages" in test_case.config: + container = container_backend._adapter.containers_created[0] + command_str = " ".join(container["command"]) + assert "pip install" in command_str + for package in test_case.config["packages"]: + assert package in command_str + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="list all jobs", + expected_status=SUCCESS, + config={"num_jobs": 2}, + ), + TestCase( + name="list empty jobs", + expected_status=SUCCESS, + config={"num_jobs": 0}, + ), + ], +) +def test_list_jobs(container_backend, test_case): + """Test listing training jobs.""" + print("Executing test:", test_case.name) + try: + runtime = container_backend.get_runtime("torch-distributed") + created_jobs = [] + + for _ in range(test_case.config["num_jobs"]): + trainer = types.CustomTrainer(func=simple_train_func, num_nodes=1) + job_name = container_backend.train(runtime=runtime, trainer=trainer) + created_jobs.append(job_name) + + jobs = container_backend.list_jobs() + + assert test_case.expected_status == SUCCESS + assert len(jobs) == test_case.config["num_jobs"] + if test_case.config["num_jobs"] > 0: + job_names = [job.name for job in jobs] + for created_job in created_jobs: + assert created_job in job_names + + except Exception as e: + assert type(e) is test_case.expected_error + 
print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="get existing job", + expected_status=SUCCESS, + config={"num_nodes": 2}, + ), + TestCase( + name="get nonexistent job", + expected_status=FAILED, + config={"job_name": "nonexistent-job"}, + expected_error=ValueError, + ), + ], +) +def test_get_job(container_backend, test_case): + """Test getting a specific job.""" + print("Executing test:", test_case.name) + try: + if test_case.name == "get existing job": + trainer = types.CustomTrainer( + func=simple_train_func, num_nodes=test_case.config["num_nodes"] + ) + runtime = container_backend.get_runtime("torch-distributed") + job_name = container_backend.train(runtime=runtime, trainer=trainer) + + job = container_backend.get_job(job_name) + + assert test_case.expected_status == SUCCESS + assert job.name == job_name + assert job.num_nodes == test_case.config["num_nodes"] + assert len(job.steps) == test_case.config["num_nodes"] + + elif test_case.name == "get nonexistent job": + container_backend.get_job(test_case.config["job_name"]) + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="get logs no follow", + expected_status=SUCCESS, + config={"follow": False}, + ), + TestCase( + name="get logs with follow", + expected_status=SUCCESS, + config={"follow": True}, + ), + ], +) +def test_get_job_logs(container_backend, test_case): + """Test getting job logs.""" + print("Executing test:", test_case.name) + try: + trainer = types.CustomTrainer(func=simple_train_func, num_nodes=1) + runtime = container_backend.get_runtime("torch-distributed") + job_name = container_backend.train(runtime=runtime, trainer=trainer) + + logs = list(container_backend.get_job_logs(job_name, follow=test_case.config["follow"])) + + assert test_case.expected_status == SUCCESS + assert len(logs) > 0 + if test_case.config["follow"]: + 
assert any("Log line" in log for log in logs) + else: + assert any("Complete log" in log for log in logs) + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="wait for complete", + expected_status=SUCCESS, + config={"wait_status": constants.TRAINJOB_COMPLETE, "container_exit_code": 0}, + ), + TestCase( + name="wait timeout", + expected_status=FAILED, + config={"wait_status": constants.TRAINJOB_COMPLETE, "timeout": 2}, + expected_error=TimeoutError, + ), + TestCase( + name="job fails", + expected_status=FAILED, + config={"wait_status": constants.TRAINJOB_COMPLETE, "container_exit_code": 1}, + expected_error=RuntimeError, + ), + ], +) +def test_wait_for_job_status(container_backend, test_case): + """Test waiting for job status.""" + print("Executing test:", test_case.name) + try: + trainer = types.CustomTrainer(func=simple_train_func, num_nodes=1) + runtime = container_backend.get_runtime("torch-distributed") + job_name = container_backend.train(runtime=runtime, trainer=trainer) + + if test_case.name == "wait for complete": + container_id = container_backend._adapter.containers_created[0]["id"] + container_backend._adapter.set_container_status( + container_id, "exited", test_case.config["container_exit_code"] + ) + + completed_job = container_backend.wait_for_job_status( + job_name, status={test_case.config["wait_status"]}, timeout=5, polling_interval=1 + ) + + assert test_case.expected_status == SUCCESS + assert completed_job.status == constants.TRAINJOB_COMPLETE + + elif test_case.name == "wait timeout": + container_backend.wait_for_job_status( + job_name, + status={test_case.config["wait_status"]}, + timeout=test_case.config["timeout"], + polling_interval=1, + ) + + elif test_case.name == "job fails": + container_id = container_backend._adapter.containers_created[0]["id"] + container_backend._adapter.set_container_status( + container_id, 
"exited", test_case.config["container_exit_code"] + ) + + container_backend.wait_for_job_status( + job_name, status={test_case.config["wait_status"]}, timeout=5, polling_interval=1 + ) + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="delete with auto_remove true", + expected_status=SUCCESS, + config={"auto_remove": True, "num_nodes": 2}, + ), + TestCase( + name="delete with auto_remove false", + expected_status=SUCCESS, + config={"auto_remove": False, "num_nodes": 2}, + ), + ], +) +def test_delete_job(container_backend, temp_workdir, test_case): + """Test deleting a job.""" + print("Executing test:", test_case.name) + try: + container_backend.cfg.workdir_base = temp_workdir + container_backend.cfg.auto_remove = test_case.config["auto_remove"] + + trainer = types.CustomTrainer( + func=simple_train_func, num_nodes=test_case.config["num_nodes"] + ) + runtime = container_backend.get_runtime("torch-distributed") + job_name = container_backend.train(runtime=runtime, trainer=trainer) + + job_workdir = Path(temp_workdir) / job_name + assert job_workdir.exists() + + container_backend.delete_job(job_name) + + assert test_case.expected_status == SUCCESS + assert len(container_backend._adapter.containers_stopped) == test_case.config["num_nodes"] + assert len(container_backend._adapter.containers_removed) == test_case.config["num_nodes"] + assert len(container_backend._adapter.networks_deleted) == 1 + + if test_case.config["auto_remove"]: + assert not job_workdir.exists() + else: + assert job_workdir.exists() + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") + + +@pytest.mark.parametrize( + "test_case", + [ + TestCase( + name="running container", + expected_status=SUCCESS, + config={ + "container_status": "running", + "exit_code": None, + "expected_job_status": constants.TRAINJOB_RUNNING, + }, + 
), + TestCase( + name="exited success", + expected_status=SUCCESS, + config={ + "container_status": "exited", + "exit_code": 0, + "expected_job_status": constants.TRAINJOB_COMPLETE, + }, + ), + TestCase( + name="exited failure", + expected_status=SUCCESS, + config={ + "container_status": "exited", + "exit_code": 1, + "expected_job_status": constants.TRAINJOB_FAILED, + }, + ), + ], +) +def test_container_status_mapping(container_backend, test_case): + """Test container status mapping to TrainJob status.""" + print("Executing test:", test_case.name) + try: + trainer = types.CustomTrainer(func=simple_train_func, num_nodes=1) + runtime = container_backend.get_runtime("torch-distributed") + job_name = container_backend.train(runtime=runtime, trainer=trainer) + + container_id = container_backend._adapter.containers_created[0]["id"] + container_backend._adapter.set_container_status( + container_id, test_case.config["container_status"], test_case.config["exit_code"] + ) + + job = container_backend.get_job(job_name) + + assert test_case.expected_status == SUCCESS + assert job.status == test_case.config["expected_job_status"] + + except Exception as e: + assert type(e) is test_case.expected_error + print("test execution complete") diff --git a/kubeflow/trainer/backends/container/client_adapter.py b/kubeflow/trainer/backends/container/client_adapter.py new file mode 100644 index 00000000..d7715fd8 --- /dev/null +++ b/kubeflow/trainer/backends/container/client_adapter.py @@ -0,0 +1,515 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Container client adapters for Docker and Podman. + +This module implements the adapter pattern to abstract away differences between +Docker and Podman APIs, allowing the backend to work with either runtime through +a common interface. +""" + +from __future__ import annotations + +import abc +from collections.abc import Iterator + + +class ContainerClientAdapter(abc.ABC): + """ + Abstract adapter interface for container clients. + + This adapter abstracts the container runtime API, allowing the backend + to work with Docker and Podman through a unified interface. + """ + + @abc.abstractmethod + def ping(self): + """Test the connection to the container runtime.""" + raise NotImplementedError() + + @abc.abstractmethod + def create_network( + self, + name: str, + labels: dict[str, str], + ) -> str: + """ + Create a container network. + + Args: + name: Network name + labels: Labels to attach to the network + + Returns: + Network ID or name + """ + raise NotImplementedError() + + @abc.abstractmethod + def delete_network(self, network_id: str): + """Delete a network.""" + raise NotImplementedError() + + @abc.abstractmethod + def create_and_start_container( + self, + image: str, + command: list[str], + name: str, + network_id: str, + environment: dict[str, str], + labels: dict[str, str], + volumes: dict[str, dict[str, str]], + working_dir: str, + ) -> str: + """ + Create and start a container. 
+ + Args: + image: Container image + command: Command to run + name: Container name + network_id: Network to attach to + environment: Environment variables + labels: Container labels + volumes: Volume mounts + working_dir: Working directory + + Returns: + Container ID + """ + raise NotImplementedError() + + @abc.abstractmethod + def get_container(self, container_id: str): + """Get container object by ID.""" + raise NotImplementedError() + + @abc.abstractmethod + def container_logs(self, container_id: str, follow: bool) -> Iterator[str]: + """Stream logs from a container.""" + raise NotImplementedError() + + @abc.abstractmethod + def stop_container(self, container_id: str, timeout: int = 10): + """Stop a container.""" + raise NotImplementedError() + + @abc.abstractmethod + def remove_container(self, container_id: str, force: bool = True): + """Remove a container.""" + raise NotImplementedError() + + @abc.abstractmethod + def pull_image(self, image: str): + """Pull an image.""" + raise NotImplementedError() + + @abc.abstractmethod + def image_exists(self, image: str) -> bool: + """Check if an image exists locally.""" + raise NotImplementedError() + + @abc.abstractmethod + def run_oneoff_container(self, image: str, command: list[str]) -> str: + """ + Run a short-lived container and return its output. + + Args: + image: Container image + command: Command to run + + Returns: + Container output as string + """ + raise NotImplementedError() + + @abc.abstractmethod + def container_status(self, container_id: str) -> tuple[str, int | None]: + """ + Get container status. + + Returns: + Tuple of (status_string, exit_code) + Status strings: "running", "created", "exited", etc. + Exit code is None if container hasn't exited + """ + raise NotImplementedError() + + @abc.abstractmethod + def get_container_ip(self, container_id: str, network_id: str) -> str | None: + """ + Get container's IP address on a specific network. 
+ + Args: + container_id: Container ID + network_id: Network name or ID + + Returns: + IP address string or None if not found + """ + raise NotImplementedError() + + +class DockerClientAdapter(ContainerClientAdapter): + """Adapter for Docker client.""" + + def __init__(self, host: str | None = None): + """ + Initialize Docker client. + + Args: + host: Docker host URL, or None to use environment defaults + """ + try: + import docker # type: ignore + except ImportError as e: + raise ImportError( + "The 'docker' Python package is not installed. Install with extras: " + "pip install kubeflow[docker]" + ) from e + + if host: + self.client = docker.DockerClient(base_url=host) + else: + self.client = docker.from_env() + + self._runtime_type = "docker" + + def ping(self): + """Test connection to Docker daemon.""" + self.client.ping() + + def create_network(self, name: str, labels: dict[str, str]) -> str: + """Create a Docker network.""" + try: + self.client.networks.get(name) + return name + except Exception: + pass + + self.client.networks.create( + name=name, + check_duplicate=True, + labels=labels, + ) + return name + + def delete_network(self, network_id: str): + """Delete Docker network.""" + try: + net = self.client.networks.get(network_id) + net.remove() + except Exception: + pass + + def create_and_start_container( + self, + image: str, + command: list[str], + name: str, + network_id: str, + environment: dict[str, str], + labels: dict[str, str], + volumes: dict[str, dict[str, str]], + working_dir: str, + ) -> str: + """Create and start a Docker container.""" + container = self.client.containers.run( + image=image, + command=tuple(command), + name=name, + detach=True, + working_dir=working_dir, + network=network_id, + environment=environment, + labels=labels, + volumes=volumes, + auto_remove=False, + ) + return container.id + + def get_container(self, container_id: str): + """Get Docker container by ID.""" + return self.client.containers.get(container_id) + + def 
container_logs(self, container_id: str, follow: bool) -> Iterator[str]: + """Stream logs from Docker container.""" + container = self.get_container(container_id) + logs = container.logs(stream=bool(follow), follow=bool(follow)) + if follow: + for chunk in logs: + if isinstance(chunk, bytes): + yield chunk.decode("utf-8", errors="ignore") + else: + yield str(chunk) + else: + if isinstance(logs, bytes): + yield logs.decode("utf-8", errors="ignore") + else: + yield str(logs) + + def stop_container(self, container_id: str, timeout: int = 10): + """Stop Docker container.""" + container = self.get_container(container_id) + container.stop(timeout=timeout) + + def remove_container(self, container_id: str, force: bool = True): + """Remove Docker container.""" + container = self.get_container(container_id) + container.remove(force=force) + + def pull_image(self, image: str): + """Pull Docker image.""" + self.client.images.pull(image) + + def image_exists(self, image: str) -> bool: + """Check if Docker image exists locally.""" + try: + self.client.images.get(image) + return True + except Exception: + return False + + def run_oneoff_container(self, image: str, command: list[str]) -> str: + """Run a short-lived Docker container and return output.""" + try: + output = self.client.containers.run( + image=image, + command=tuple(command), + detach=False, + remove=True, + ) + if isinstance(output, (bytes, bytearray)): + return output.decode("utf-8", errors="ignore") + return str(output) + except Exception as e: + raise RuntimeError(f"One-off container failed to run: {e}") from e + + def container_status(self, container_id: str) -> tuple[str, int | None]: + """Get Docker container status.""" + try: + container = self.get_container(container_id) + status = container.status + # Get exit code if container has exited + exit_code = None + if status == "exited": + inspect = container.attrs if hasattr(container, "attrs") else container.inspect() + exit_code = inspect.get("State", 
{}).get("ExitCode") + return (status, exit_code) + except Exception: + return ("unknown", None) + + def get_container_ip(self, container_id: str, network_id: str) -> str | None: + """Get container's IP address on a specific network.""" + try: + container = self.get_container(container_id) + # Refresh container info + container.reload() + # Get network settings + networks = container.attrs.get("NetworkSettings", {}).get("Networks", {}) + + # Try to find the network by exact name or ID + if network_id in networks: + return networks[network_id].get("IPAddress") + + # Fallback: return first available IP + for net_name, net_info in networks.items(): + ip = net_info.get("IPAddress") + if ip: + return ip + + return None + except Exception: + return None + + +class PodmanClientAdapter(ContainerClientAdapter): + """Adapter for Podman client.""" + + def __init__(self, host: str | None = None): + """ + Initialize Podman client. + + Args: + host: Podman host URL, or None to use environment defaults + """ + try: + import podman # type: ignore + except ImportError as e: + raise ImportError( + "The 'podman' Python package is not installed. 
Install with extras: " + "pip install kubeflow[podman]" + ) from e + + if host: + self.client = podman.PodmanClient(base_url=host) + else: + self.client = podman.PodmanClient() + + self._runtime_type = "podman" + + def ping(self): + """Test connection to Podman.""" + self.client.ping() + + def create_network(self, name: str, labels: dict[str, str]) -> str: + """Create a Podman network with DNS enabled.""" + try: + self.client.networks.get(name) + return name + except Exception: + pass + + self.client.networks.create( + name=name, + driver="bridge", + dns_enabled=True, + labels=labels, + ) + return name + + def delete_network(self, network_id: str): + """Delete Podman network.""" + try: + net = self.client.networks.get(network_id) + net.remove() + except Exception: + pass + + def create_and_start_container( + self, + image: str, + command: list[str], + name: str, + network_id: str, + environment: dict[str, str], + labels: dict[str, str], + volumes: dict[str, dict[str, str]], + working_dir: str, + ) -> str: + """Create and start a Podman container.""" + container = self.client.containers.run( + image=image, + command=command, + name=name, + network=network_id, + working_dir=working_dir, + environment=environment, + labels=labels, + volumes=volumes, + detach=True, + remove=False, + ) + return container.id + + def get_container(self, container_id: str): + """Get Podman container by ID.""" + return self.client.containers.get(container_id) + + def container_logs(self, container_id: str, follow: bool) -> Iterator[str]: + """Stream logs from Podman container.""" + container = self.get_container(container_id) + logs = container.logs(stream=bool(follow), follow=bool(follow)) + if follow: + for chunk in logs: + if isinstance(chunk, bytes): + yield chunk.decode("utf-8", errors="ignore") + else: + yield str(chunk) + else: + if isinstance(logs, bytes): + yield logs.decode("utf-8", errors="ignore") + else: + yield str(logs) + + def stop_container(self, container_id: str, timeout: 
int = 10): + """Stop Podman container.""" + container = self.get_container(container_id) + container.stop(timeout=timeout) + + def remove_container(self, container_id: str, force: bool = True): + """Remove Podman container.""" + container = self.get_container(container_id) + container.remove(force=force) + + def pull_image(self, image: str): + """Pull Podman image.""" + self.client.images.pull(image) + + def image_exists(self, image: str) -> bool: + """Check if Podman image exists locally.""" + try: + self.client.images.get(image) + return True + except Exception: + return False + + def run_oneoff_container(self, image: str, command: list[str]) -> str: + """Run a short-lived Podman container and return output.""" + try: + container = self.client.containers.create( + image=image, + command=command, + detach=False, + remove=True, + ) + container.start() + container.wait() + logs = container.logs() + + if isinstance(logs, (bytes, bytearray)): + return logs.decode("utf-8", errors="ignore") + return str(logs) + except Exception as e: + raise RuntimeError(f"One-off container failed to run: {e}") from e + + def container_status(self, container_id: str) -> tuple[str, int | None]: + """Get Podman container status.""" + try: + container = self.get_container(container_id) + status = container.status + # Get exit code if container has exited + exit_code = None + if status == "exited": + inspect = container.attrs if hasattr(container, "attrs") else container.inspect() + exit_code = inspect.get("State", {}).get("ExitCode") + return (status, exit_code) + except Exception: + return ("unknown", None) + + def get_container_ip(self, container_id: str, network_id: str) -> str | None: + """Get container's IP address on a specific network.""" + try: + container = self.get_container(container_id) + # Get container inspect data + inspect = container.attrs if hasattr(container, "attrs") else container.inspect() + + # Get network settings - Podman structure is similar to Docker + networks = 
inspect.get("NetworkSettings", {}).get("Networks", {}) + + # Try to find the network by exact name or ID + if network_id in networks: + return networks[network_id].get("IPAddress") + + # Fallback: return first available IP + for net_name, net_info in networks.items(): + ip = net_info.get("IPAddress") + if ip: + return ip + + return None + except Exception: + return None diff --git a/kubeflow/trainer/backends/container/runtime_loader.py b/kubeflow/trainer/backends/container/runtime_loader.py new file mode 100644 index 00000000..6b9bdf06 --- /dev/null +++ b/kubeflow/trainer/backends/container/runtime_loader.py @@ -0,0 +1,27 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Runtime loader for the Container backend. + +This module provides container-agnostic imports for the shared local runtime loader. +""" + +from kubeflow.trainer.backends.local_runtime_loader import ( + LOCAL_RUNTIMES_DIR, + get_local_runtime, + list_local_runtimes, +) + +__all__ = ["LOCAL_RUNTIMES_DIR", "get_local_runtime", "list_local_runtimes"] diff --git a/kubeflow/trainer/backends/container/types.py b/kubeflow/trainer/backends/container/types.py new file mode 100644 index 00000000..ae1ce207 --- /dev/null +++ b/kubeflow/trainer/backends/container/types.py @@ -0,0 +1,56 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""
Types and configuration for the unified Container backend.

This backend automatically detects and uses either Docker or Podman.
It provides a single interface for container-based execution regardless
of the underlying runtime.

Configuration options:
    - image: Optional explicit image. If omitted, use the image referenced by the
      selected runtime (e.g., torch_distributed) from `config/local_runtimes`.
    - pull_policy: Controls image pulling. Supported values: "IfNotPresent",
      "Always", "Never". The default is "IfNotPresent".
    - auto_remove: Whether to remove containers and networks when jobs are deleted.
      Defaults to True.
    - gpus: GPU support (implementation varies between Docker and Podman).
      Defaults to None.
    - env: Optional global environment variables applied to all containers.
    - container_host: Optional override for connecting to a remote/local container
      daemon. By default, auto-detects from environment or uses system defaults.
      For Docker: uses DOCKER_HOST or default socket.
      For Podman: uses CONTAINER_HOST or default socket.
    - workdir_base: Base directory on the host to place per-job working dirs that
      are bind-mounted into containers as /workspace. Defaults to a path under the
      user's home directory for compatibility.
    - runtime: Force use of a specific container runtime ("docker" or "podman").
      If not set, auto-detects based on availability (tries Docker first, then Podman).
"""

from typing import Literal, Optional, Union

from pydantic import BaseModel, Field


class ContainerBackendConfig(BaseModel):
    """Configuration for the auto-detecting Docker/Podman container backend."""

    # Explicit image override; None means "use the selected runtime's image".
    image: Optional[str] = Field(default=None)
    # Constrained to the documented values so invalid policies fail at
    # validation time instead of being silently accepted as plain strings.
    pull_policy: Literal["IfNotPresent", "Always", "Never"] = Field(default="IfNotPresent")
    # Remove containers/networks when jobs are deleted.
    auto_remove: bool = Field(default=True)
    # GPU request; semantics differ between Docker and Podman.
    gpus: Optional[Union[int, bool]] = Field(default=None)
    # Environment variables applied to all containers.
    env: Optional[dict[str, str]] = Field(default=None)
    # Daemon connection override (DOCKER_HOST / CONTAINER_HOST equivalent).
    container_host: Optional[str] = Field(default=None)
    # Host base dir for per-job workdirs bind-mounted as /workspace.
    workdir_base: Optional[str] = Field(default=None)
    # Force a specific runtime; None auto-detects (Docker first, then Podman).
    runtime: Optional[Literal["docker", "podman"]] = Field(default=None)
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import yaml + +from kubeflow.trainer.types import types as base_types + +LOCAL_RUNTIMES_DIR = Path(__file__).parents[1] / "config" / "local_runtimes" + + +def _load_runtime_from_yaml(path: Path) -> dict[str, Any]: + with open(path) as f: + data: dict[str, Any] = yaml.safe_load(f) + return data + + +def list_local_runtimes() -> list[base_types.Runtime]: + runtimes: list[base_types.Runtime] = [] + if not LOCAL_RUNTIMES_DIR.exists(): + return runtimes + + for f in sorted(LOCAL_RUNTIMES_DIR.glob("*.yaml")): + data = _load_runtime_from_yaml(f) + + # Require CRD-like schema strictly. Accept both ClusterTrainingRuntime + # and TrainingRuntime kinds. + if not ( + data.get("kind") in {"ClusterTrainingRuntime", "TrainingRuntime"} + and data.get("metadata") + ): + raise ValueError( + f"Runtime YAML {f} must be a ClusterTrainingRuntime CRD-shaped document" + ) + + name = data["metadata"].get("name") + if not name: + raise ValueError(f"Runtime YAML {f} missing metadata.name") + + labels = data["metadata"].get("labels", {}) + framework = labels.get("trainer.kubeflow.org/framework") + if not framework: + raise ValueError( + f"Runtime {name} must set metadata.labels['trainer.kubeflow.org/framework']" + ) + + spec = data.get("spec", {}) + ml_policy = spec.get("mlPolicy", {}) + num_nodes = int(ml_policy.get("numNodes", 1)) + + # Validate presence of a 'node' replicated job with a container image + templ = spec.get("template", {}).get("spec", {}) + replicated = templ.get("replicatedJobs", []) + node_jobs = [j for j in replicated if j.get("name") == "node"] + if not node_jobs: + raise ValueError(f"Runtime {name} must define replicatedJobs with a 'node' entry") + node_spec = ( + node_jobs[0].get("template", {}).get("spec", {}).get("template", {}).get("spec", {}) + ) + containers = node_spec.get("containers", []) + if not containers or not containers[0].get("image"): + raise 
ValueError(f"Runtime {name} 'node' must specify containers[0].image") + + runtimes.append( + base_types.Runtime( + name=name, + trainer=base_types.RuntimeTrainer( + trainer_type=base_types.TrainerType.CUSTOM_TRAINER, + framework=framework, + num_nodes=num_nodes, + ), + pretrained_model=None, + ) + ) + return runtimes + + +def get_local_runtime(name: str) -> base_types.Runtime: + for rt in list_local_runtimes(): + if rt.name == name: + return rt + raise ValueError(f"Runtime '{name}' not found in {LOCAL_RUNTIMES_DIR}") diff --git a/kubeflow/trainer/config/local_runtimes/torch_distributed.yaml b/kubeflow/trainer/config/local_runtimes/torch_distributed.yaml new file mode 100644 index 00000000..60d36cfb --- /dev/null +++ b/kubeflow/trainer/config/local_runtimes/torch_distributed.yaml @@ -0,0 +1,25 @@ +apiVersion: trainer.kubeflow.org/v1alpha1 +kind: ClusterTrainingRuntime +metadata: + name: torch-distributed + labels: + trainer.kubeflow.org/framework: torch +spec: + mlPolicy: + numNodes: 1 + torch: + numProcPerNode: auto + template: + spec: + replicatedJobs: + - name: node + template: + metadata: + labels: + trainer.kubeflow.org/trainjob-ancestor-step: trainer + spec: + template: + spec: + containers: + - name: node + image: pytorch/pytorch:2.7.1-cuda12.8-cudnn9-runtime diff --git a/pyproject.toml b/pyproject.toml index 0f3072dc..1b5681b0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,14 @@ dependencies = [ "kubeflow-trainer-api>=2.0.0", ] +[project.optional-dependencies] +docker = [ + "docker>=6.1.3", +] +podman = [ + "podman>=5.6.0" +] + [dependency-groups] dev = [ "pytest>=7.0", diff --git a/uv.lock b/uv.lock index fe6ae844..caee53ac 100644 --- a/uv.lock +++ b/uv.lock @@ -36,66 +36,66 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = 
"sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, - { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, - { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, - { url = 
"https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, - { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, - { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, - { url = 
"https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size 
= 477999 }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, - { url = 
"https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, - { url = 
"https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, - { url = 
"https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, - { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, - { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, - { url = 
"https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, - { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, - { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, - { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, - { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, - { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, - { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, - 
{ url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632 }, - { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, - { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 
426024, upload-time = "2024-09-04T20:43:34.186Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" 
}, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220, upload-time = "2024-09-04T20:45:01.577Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605, upload-time = "2024-09-04T20:45:03.837Z" }, + { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910, upload-time = "2024-09-04T20:45:05.315Z" }, + { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200, upload-time = "2024-09-04T20:45:06.903Z" }, + { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565, upload-time = "2024-09-04T20:45:08.975Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635, upload-time = "2024-09-04T20:45:10.64Z" }, + { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218, upload-time = "2024-09-04T20:45:12.366Z" }, + { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486, upload-time = "2024-09-04T20:45:13.935Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911, upload-time = "2024-09-04T20:45:15.696Z" }, + { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632, upload-time = "2024-09-04T20:45:17.284Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820, upload-time = "2024-09-04T20:45:18.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290, upload-time = "2024-09-04T20:45:20.226Z" }, ] [[package]] @@ -292,44 +292,44 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702 }, - { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483 }, - { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679 }, - { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553 }, - { url = 
"https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499 }, - { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484 }, - { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281 }, - { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890 }, - { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247 }, - { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045 }, - { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923 }, - { url = 
"https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805 }, - { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111 }, - { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169 }, - { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273 }, - { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211 }, - { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732 }, - { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655 }, - { url = 
"https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956 }, - { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859 }, - { url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254 }, - { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815 }, - { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147 }, - { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459 }, - { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812 }, - { url = 
"https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694 }, - { url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010 }, - { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377 }, - { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609 }, - { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156 }, - { url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669 }, - { url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022 }, - { url = 
"https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802 }, - { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706 }, - { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740 }, - { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874 }, +sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" }, + { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" }, + { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" }, + { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" }, + { url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" }, + { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" }, + { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, 
upload-time = "2025-08-05T23:58:47.121Z" }, + { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" }, + { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" }, + { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" }, + { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" }, + 
{ url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" }, + { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812, upload-time = "2025-08-05T23:59:04.833Z" }, + { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" }, + { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" }, + { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156, upload-time = "2025-08-05T23:59:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669, upload-time = "2025-08-05T23:59:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" }, + { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" }, + { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874, upload-time = "2025-08-05T23:59:23.017Z" }, ] [[package]] @@ -341,6 +341,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + [[package]] name = "durationpy" version = "0.10" @@ -421,6 +435,14 @@ dependencies = [ { name = "pydantic" }, ] +[package.optional-dependencies] +docker = [ + { name = "docker" }, +] +podman = [ + { name = "podman" }, +] + [package.dev-dependencies] dev = [ { name = "coverage" }, @@ -434,10 +456,13 @@ dev = [ [package.metadata] requires-dist = [ + { name = "docker", marker = "extra == 'docker'", specifier = ">=6.1.3" }, { name = "kubeflow-trainer-api", specifier = ">=2.0.0" }, { name = "kubernetes", specifier = ">=27.2.0" }, + { name = "podman", marker = "extra == 'podman'", specifier = ">=5.6.0" }, { name = "pydantic", specifier = ">=2.10.0" }, ] +provides-extras = ["docker", "podman"] [package.metadata.requires-dev] dev = [ @@ -525,6 +550,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "podman" +version = "5.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "urllib3" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/3b/36/070e7bf682ac0868450584df79198c178323e80f73b8fb9b6fec8bde0a65/podman-5.6.0.tar.gz", hash = "sha256:cc5f7aa9562e30f992fc170a48da970a7132be60d8a2e2941e6c17bd0a0b35c9", size = 72832, upload-time = "2025-09-05T09:42:40.071Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/9e/8c62f05b104d9f00edbb4c298b152deceb393ea67f0288d89d1139d7a859/podman-5.6.0-py3-none-any.whl", hash = "sha256:967ff8ad8c6b851bc5da1a9410973882d80e235a9410b7d1e931ce0c3324fbe3", size = 88713, upload-time = "2025-09-05T09:42:38.405Z" }, +] + [[package]] name = "pre-commit" version = "4.3.0" @@ -566,9 +605,9 @@ wheels = [ name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, ] [[package]] @@ -706,9 +745,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "urllib3" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/6a/a7/403e04aa96e2d94e1518d518d69718c2ba978c8d3ffa4ab3b101b94dbafa/pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb", size = 3707928 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/a7/403e04aa96e2d94e1518d518d69718c2ba978c8d3ffa4ab3b101b94dbafa/pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb", size = 3707928, upload-time = "2025-07-31T11:52:53.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/76/d768dd31322173b3956692b75471ac37bf3759c7abb603152f6a9b6594a8/pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700", size = 416514 }, + { url = "https://files.pythonhosted.org/packages/57/76/d768dd31322173b3956692b75471ac37bf3759c7abb603152f6a9b6594a8/pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700", size = 416514, upload-time = "2025-07-31T11:52:51.909Z" }, ] [[package]] @@ -724,9 +763,9 @@ wheels = [ name = "pyjwt" version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, + { 
url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] [package.optional-dependencies] @@ -741,17 +780,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854 } +sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854, upload-time = "2022-01-07T22:05:41.134Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920 }, - { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722 }, - { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087 }, - { url = 
"https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678 }, - { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660 }, - { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824 }, - { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912 }, - { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624 }, - { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141 }, + { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920, upload-time = "2022-01-07T22:05:49.156Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722, upload-time = "2022-01-07T22:05:50.989Z" }, + { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087, upload-time = "2022-01-07T22:05:52.539Z" }, + { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678, upload-time = "2022-01-07T22:05:54.251Z" }, + { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660, upload-time = "2022-01-07T22:05:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824, upload-time = "2022-01-07T22:05:57.434Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912, upload-time = "2022-01-07T22:05:58.665Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624, upload-time = "2022-01-07T22:06:00.085Z" }, + { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141, upload-time = "2022-01-07T22:06:01.861Z" }, ] [[package]] @@ -796,6 +835,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = 
"sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/59/42/b86689aac0cdaee7ae1c58d464b0ff04ca909c19bb6502d4973cdd9f9544/pywin32-311-cp39-cp39-win32.whl", hash = "sha256:aba8f82d551a942cb20d4a83413ccbac30790b50efb89a75e4f586ac0bb8056b", size = 8760837, upload-time = "2025-07-14T20:12:59.59Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/8a/1403d0353f8c5a2f0829d2b1c4becbf9da2f0a4d040886404fc4a5431e4d/pywin32-311-cp39-cp39-win_amd64.whl", hash = "sha256:e0c4cfb0621281fe40387df582097fd796e80430597cb9944f0ae70447bacd91", size = 9590187, upload-time = "2025-07-14T20:13:01.419Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/e0e8d802f124772cec9c75430b01a212f86f9de7546bda715e54140d5aeb/pywin32-311-cp39-cp39-win_arm64.whl", hash = "sha256:62ea666235135fee79bb154e695f3ff67370afefd71bd7fea7512fc70ef31e3d", size = 8778162, upload-time = "2025-07-14T20:13:03.544Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2"