diff --git a/.github/workflows/check-pr-title.yaml b/.github/workflows/check-pr-title.yaml index 50ffcdab8..a745b0a46 100644 --- a/.github/workflows/check-pr-title.yaml +++ b/.github/workflows/check-pr-title.yaml @@ -33,6 +33,7 @@ jobs: ci docs examples + optimizer scripts test trainer diff --git a/Makefile b/Makefile index a3bf9833e..f79736666 100644 --- a/Makefile +++ b/Makefile @@ -37,8 +37,6 @@ VENV_DIR := $(PROJECT_DIR)/.venv help: ## Display this help. @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) -#UV := $(shell which uv) - .PHONY: uv uv: ## Install UV @command -v uv &> /dev/null || { \ @@ -57,7 +55,7 @@ verify: install-dev ## install all required tools @uv run ruff format --check kubeflow .PHONY: uv-venv -uv-venv: +uv-venv: ## Create uv virtual environment @if [ ! -d "$(VENV_DIR)" ]; then \ echo "Creating uv virtual environment in $(VENV_DIR)..."; \ uv venv; \ @@ -75,10 +73,14 @@ release: install-dev # make test-python will produce html coverage by default. Run with `make test-python report=xml` to produce xml report. .PHONY: test-python -test-python: uv-venv +test-python: uv-venv ## Run Python unit tests @uv sync - @uv run coverage run --source=kubeflow.trainer.backends.kubernetes.backend,kubeflow.trainer.utils.utils -m pytest ./kubeflow/trainer/backends/kubernetes/backend_test.py ./kubeflow/trainer/utils/utils_test.py - @uv run coverage report -m kubeflow/trainer/backends/kubernetes/backend.py kubeflow/trainer/utils/utils.py + @uv run coverage run --source=kubeflow.trainer.backends.kubernetes.backend,kubeflow.trainer.utils.utils -m pytest \ + ./kubeflow/trainer/backends/kubernetes/backend_test.py \ + ./kubeflow/trainer/backends/kubernetes/utils_test.py + @uv run coverage report -m \ + kubeflow/trainer/backends/kubernetes/backend.py \ + kubeflow/trainer/backends/kubernetes/utils.py ifeq ($(report),xml) @uv run coverage xml else @@ -87,7 +89,7 @@ endif .PHONY: install-dev -install-dev: uv uv-venv ruff ## Install uv, create .venv, sync deps; DEV=1 to include dev group; EXTRAS=comma,list for extras +install-dev: uv uv-venv ruff ## Install uv, create .venv, sync deps. @echo "Using virtual environment at: $(VENV_DIR)" @echo "Syncing dependencies with uv..." @uv sync diff --git a/docs/proposals/2-trainer-local-execution/README.md b/docs/proposals/2-trainer-local-execution/README.md index 8a803bf25..06188275c 100644 --- a/docs/proposals/2-trainer-local-execution/README.md +++ b/docs/proposals/2-trainer-local-execution/README.md @@ -14,6 +14,7 @@ AI Practitioners often want to experiment locally before scaling their models to The proposed local execution mode will allow engineers to quickly test their models in isolated containers or virtualenvs via subprocess, facilitating a faster and more efficient workflow. ### Goals + - Allow users to run training jobs on their local machines using container runtimes or subprocess. - Rework current Kubeflow Trainer SDK to implement Execution Backends with Kubernetes Backend as default. - Implement Local Execution Backends that integrates seamlessly with the Kubeflow SDK, supporting both single-node and multi-node training processes. @@ -21,6 +22,7 @@ The proposed local execution mode will allow engineers to quickly test their mod - Ensure compatibility with existing Kubeflow Trainer SDK features and user interfaces. 
### Non-Goals
+
- Full support for distributed training in the first phase of implementation.
- Support for all ML frameworks or runtime environments in the initial proof-of-concept.
- Major changes to the Kubeflow Trainer SDK architecture.
@@ -34,18 +36,22 @@ The local execution mode will allow users to run training jobs in container runt
### User Stories (Optional)

#### Story 1
+
As an AI Practitioner, I want to run my model locally using Podman/Docker containers so that I can test my training job without incurring the costs of running a Kubernetes cluster.

#### Story 2
+
As an AI Practitioner, I want to initialize datasets and models within Podman/Docker containers, so that I can streamline my local training environment.

### Notes/Constraints/Caveats
+
- The local execution mode will first support Subprocess, with future plans to explore Podman, Docker, and Apple Container.
- The subprocess implementation will be restricted to a single node.
- The local execution mode will initially support only the PyTorch runtime.
- Resource limits on memory, CPU, and GPU are not fully supported locally and may remain unsupported if the execution backend doesn't expose APIs for them.

### Risks and Mitigations
+
- **Risk**: Compatibility issues with non-Docker container runtimes.
  - **Mitigation**: Initially restrict support to Podman/Docker and evaluate alternatives for future phases.
- **Risk**: Potential conflicts between local and Kubernetes execution modes.
@@ -55,7 +61,7 @@ As an AI Practitioner, I want to initialize datasets and models within Podman/Do

The local execution mode will be implemented via new `LocalProcessBackend`, `PodmanBackend`, and `DockerBackend` classes, which will allow users to execute training jobs using containers and virtual environment isolation. The client will utilize container runtime capabilities to create isolated environments, including volumes and networks, to manage the training lifecycle. It will also allow for easy dataset and model initialization.

-- Different execution backends will need to implement the same interface from the `ExecutionBackend` abstract class so `TrainerClient` can initialize and load the backend.
+- Different execution backends will need to implement the same interface from the `RuntimeBackend` abstract class so `TrainerClient` can initialize and load the backend.
- The Podman/Docker client will connect to a local container environment, create shared volumes, and initialize datasets and models as needed.
- The **DockerBackend** will manage Docker containers, networks, and volumes using runtime definitions specified by the user.
- The **PodmanBackend** will manage Podman containers, networks, and volumes using runtime definitions specified by the user.
@@ -70,16 +76,20 @@ The local execution mode will be implemented using a new `LocalProcessBackend`, 
- **E2E Tests**: Conduct end-to-end tests to validate the local execution mode, ensuring that jobs can be initialized, executed, and tracked correctly within Podman/Docker containers.

### Graduation Criteria
+
- The feature will move to the `beta` stage once it supports multi-node training with the PyTorch framework as the default runtime and works seamlessly with local environments.
- Full support for multi-worker configurations and additional ML frameworks will be considered for the `stable` release.
## Implementation History + - **KEP Creation**: April 2025 - **Implementation Start**: April 2025 + ## Drawbacks - The initial implementation will be limited to single-worker training jobs, which may restrict users who need multi-node support. - The local execution mode will initially only support Subprocess and may require additional configurations for Podman/Docker container runtimes in the future. ## Alternatives + - **Full Kubernetes Execution**: Enable users to always run jobs on Kubernetes clusters, though this comes with higher costs and longer development cycles for ML engineers. diff --git a/kubeflow/common/__init__.py b/kubeflow/common/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/kubeflow/common/constants.py b/kubeflow/common/constants.py new file mode 100644 index 000000000..db82e5c3c --- /dev/null +++ b/kubeflow/common/constants.py @@ -0,0 +1,22 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# The default Kubernetes namespace. +DEFAULT_NAMESPACE = "default" + +# How long to wait in seconds for requests to the Kubernetes API Server. +DEFAULT_TIMEOUT = 120 + +# Unknown indicates that the value can't be identified. +UNKNOWN = "Unknown" diff --git a/kubeflow/trainer/backends/kubernetes/types.py b/kubeflow/common/types.py similarity index 100% rename from kubeflow/trainer/backends/kubernetes/types.py rename to kubeflow/common/types.py diff --git a/kubeflow/common/utils.py b/kubeflow/common/utils.py new file mode 100644 index 000000000..03913ae37 --- /dev/null +++ b/kubeflow/common/utils.py @@ -0,0 +1,40 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +from typing import Optional + +from kubernetes import config + +from kubeflow.common import constants + + +def is_running_in_k8s() -> bool: + return os.path.isdir("/var/run/secrets/kubernetes.io/") + + +def get_default_target_namespace(context: Optional[str] = None) -> str: + if not is_running_in_k8s(): + try: + all_contexts, current_context = config.list_kube_config_contexts() + # If context is set, we should get namespace from it. + if context: + for c in all_contexts: + if isinstance(c, dict) and c.get("name") == context: + return c["context"]["namespace"] + # Otherwise, try to get namespace from the current context. 
+ return current_context["context"]["namespace"] + except Exception: + return constants.DEFAULT_NAMESPACE + with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f: + return f.readline() diff --git a/kubeflow/optimizer/__init__.py b/kubeflow/optimizer/__init__.py new file mode 100644 index 000000000..a0d301b92 --- /dev/null +++ b/kubeflow/optimizer/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Import common types. +from kubeflow.common.types import KubernetesBackendConfig + +# Import the Kubeflow Optimizer client. +from kubeflow.optimizer.api.optimizer_client import OptimizerClient + +# Import the Kubeflow Optimizer types. +from kubeflow.optimizer.types.algorithm_types import GridSearch, RandomSearch +from kubeflow.optimizer.types.optimization_types import Objective, OptimizationJob, TrialConfig +from kubeflow.optimizer.types.search_types import Search + +# Import the Kubeflow Trainer types. +from kubeflow.trainer.types.types import TrainJobTemplate + +__all__ = [ + "GridSearch", + "KubernetesBackendConfig", + "Objective", + "OptimizationJob", + "OptimizerClient", + "RandomSearch", + "Search", + "TrainJobTemplate", + "TrialConfig", +] diff --git a/kubeflow/optimizer/api/__init__.py b/kubeflow/optimizer/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/kubeflow/optimizer/api/optimizer_client.py b/kubeflow/optimizer/api/optimizer_client.py new file mode 100644 index 000000000..4e739ed06 --- /dev/null +++ b/kubeflow/optimizer/api/optimizer_client.py @@ -0,0 +1,126 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +from typing import Any, Optional + +from kubeflow.common.types import KubernetesBackendConfig +from kubeflow.optimizer.backends.kubernetes.backend import KubernetesBackend +from kubeflow.optimizer.types.algorithm_types import BaseAlgorithm +from kubeflow.optimizer.types.optimization_types import Objective, OptimizationJob, TrialConfig +from kubeflow.trainer.types.types import TrainJobTemplate + +logger = logging.getLogger(__name__) + + +class OptimizerClient: + def __init__( + self, + backend_config: Optional[KubernetesBackendConfig] = None, + ): + """Initialize a Kubeflow Optimizer client. + + Args: + backend_config: Backend configuration. Either KubernetesBackendConfig or None to use + default config class. Defaults to KubernetesBackendConfig. + + Raises: + ValueError: Invalid backend configuration. 
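+
+        Example:
+            A minimal usage sketch (assumes a reachable cluster with Katib installed and a
+            `TrainJobTemplate` named `template` already built with the Kubeflow Trainer SDK):
+
+                from kubeflow.optimizer import OptimizerClient, Search, TrialConfig
+
+                client = OptimizerClient()  # Defaults to KubernetesBackendConfig().
+                job_name = client.optimize(
+                    trial_template=template,
+                    search_space={"lr": Search.loguniform(1e-5, 1e-2)},
+                    trial_config=TrialConfig(num_trials=5),
+                )
+                print(client.get_job(job_name).status)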
+ + """ + # Set the default backend config. + if not backend_config: + backend_config = KubernetesBackendConfig() + + if isinstance(backend_config, KubernetesBackendConfig): + self.backend = KubernetesBackend(backend_config) + else: + raise ValueError(f"Invalid backend config '{backend_config}'") + + def optimize( + self, + trial_template: TrainJobTemplate, + *, + trial_config: Optional[TrialConfig] = None, + search_space: dict[str, Any], + objectives: Optional[list[Objective]] = None, + algorithm: Optional[BaseAlgorithm] = None, + ) -> str: + """Create an OptimizationJob for hyperparameter tuning. + + Args: + trial_template: The TrainJob template defining the training script. + trial_config: Optional configuration to run Trials. + objectives: List of objectives to optimize. + search_space: Dictionary mapping parameter names to Search specifications using + Search.uniform(), Search.loguniform(), Search.choice(), etc. + algorithm: The optimization algorithm to use. Defaults to RandomSearch. + + Returns: + The unique name of the Experiment that has been generated. + + Raises: + ValueError: Input arguments are invalid. + TimeoutError: Timeout to create Experiment. + RuntimeError: Failed to create Experiment. + """ + return self.backend.optimize( + trial_template=trial_template, + trial_config=trial_config, + objectives=objectives, + search_space=search_space, + algorithm=algorithm, + ) + + def list_jobs(self) -> list[OptimizationJob]: + """List of the created OptimizationJobs + + Returns: + List of created OptimizationJobs. If no OptimizationJob exist, + an empty list is returned. + + Raises: + TimeoutError: Timeout to list OptimizationJobs. + RuntimeError: Failed to list OptimizationJobs. + """ + + return self.backend.list_jobs() + + def get_job(self, name: str) -> OptimizationJob: + """Get the OptimizationJob object + + Args: + name: Name of the OptimizationJob. + + Returns: + A OptimizationJob object. + + Raises: + TimeoutError: Timeout to get a OptimizationJob. + RuntimeError: Failed to get a OptimizationJob. + """ + + return self.backend.get_job(name=name) + + def delete_job(self, name: str): + """Delete the OptimizationJob. + + Args: + name: Name of the OptimizationJob. + + Raises: + TimeoutError: Timeout to delete OptimizationJob. + RuntimeError: Failed to delete OptimizationJob. + """ + return self.backend.delete_job(name=name) diff --git a/kubeflow/trainer/utils/__init__.py b/kubeflow/optimizer/backends/__init__.py similarity index 93% rename from kubeflow/trainer/utils/__init__.py rename to kubeflow/optimizer/backends/__init__.py index abf5d0929..48e3dcfaa 100644 --- a/kubeflow/trainer/utils/__init__.py +++ b/kubeflow/optimizer/backends/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2024 The Kubeflow Authors. +# Copyright 2025 The Kubeflow Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/kubeflow/optimizer/backends/base.py b/kubeflow/optimizer/backends/base.py new file mode 100644 index 000000000..9e55e5cfe --- /dev/null +++ b/kubeflow/optimizer/backends/base.py @@ -0,0 +1,46 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import abc +from typing import Any, Optional + +from kubeflow.optimizer.types.algorithm_types import RandomSearch +from kubeflow.optimizer.types.optimization_types import Objective, OptimizationJob, TrialConfig +from kubeflow.trainer.types.types import TrainJobTemplate + + +class RuntimeBackend(abc.ABC): + @abc.abstractmethod + def optimize( + self, + trial_template: TrainJobTemplate, + *, + search_space: dict[str, Any], + trial_config: Optional[TrialConfig] = None, + objectives: Optional[list[Objective]] = None, + algorithm: Optional[RandomSearch] = None, + ) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def list_jobs(self) -> list[OptimizationJob]: + raise NotImplementedError() + + @abc.abstractmethod + def get_job(self, name: str) -> OptimizationJob: + raise NotImplementedError() + + @abc.abstractmethod + def delete_job(self, name: str): + raise NotImplementedError() diff --git a/kubeflow/optimizer/backends/kubernetes/__init__.py b/kubeflow/optimizer/backends/kubernetes/__init__.py new file mode 100644 index 000000000..48e3dcfaa --- /dev/null +++ b/kubeflow/optimizer/backends/kubernetes/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/kubeflow/optimizer/backends/kubernetes/backend.py b/kubeflow/optimizer/backends/kubernetes/backend.py new file mode 100644 index 000000000..e8565fcac --- /dev/null +++ b/kubeflow/optimizer/backends/kubernetes/backend.py @@ -0,0 +1,364 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
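+
+# Note: this backend maps the OptimizationJob API onto Katib Experiment and Trial custom
+# resources, and reuses the Trainer Kubernetes backend to build the TrainJob spec for each Trial.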
+ +import logging +import multiprocessing +import random +import string +from typing import Any, Optional +import uuid + +from kubeflow_katib_api import models +from kubernetes import client, config + +import kubeflow.common.constants as common_constants +from kubeflow.common.types import KubernetesBackendConfig +import kubeflow.common.utils as common_utils +from kubeflow.optimizer.backends.base import RuntimeBackend +from kubeflow.optimizer.backends.kubernetes import utils +from kubeflow.optimizer.constants import constants +from kubeflow.optimizer.types.algorithm_types import RandomSearch +from kubeflow.optimizer.types.optimization_types import ( + Metric, + Objective, + OptimizationJob, + Trial, + TrialConfig, +) +from kubeflow.trainer.backends.kubernetes.backend import KubernetesBackend as TrainerBackend +import kubeflow.trainer.constants.constants as trainer_constants +from kubeflow.trainer.types.types import TrainJobTemplate + +logger = logging.getLogger(__name__) + + +class KubernetesBackend(RuntimeBackend): + def __init__(self, cfg: KubernetesBackendConfig): + if cfg.namespace is None: + cfg.namespace = common_utils.get_default_target_namespace(cfg.context) + + # If client configuration is not set, use kube-config to access Kubernetes APIs. + if cfg.client_configuration is None: + # Load kube-config or in-cluster config. + if cfg.config_file or not common_utils.is_running_in_k8s(): + config.load_kube_config(config_file=cfg.config_file, context=cfg.context) + else: + config.load_incluster_config() + + k8s_client = client.ApiClient(cfg.client_configuration) + self.custom_api = client.CustomObjectsApi(k8s_client) + self.core_api = client.CoreV1Api(k8s_client) + + self.namespace = cfg.namespace + self.trainer_backend = TrainerBackend(cfg) + + def optimize( + self, + trial_template: TrainJobTemplate, + *, + search_space: dict[str, Any], + trial_config: Optional[TrialConfig] = None, + objectives: Optional[list[Objective]] = None, + algorithm: Optional[RandomSearch] = None, + ) -> str: + # Generate unique name for the OptimizationJob. + optimization_job_name = random.choice(string.ascii_lowercase) + uuid.uuid4().hex[:11] + + # Validate search_space + if not search_space: + raise ValueError("Search space must be set.") + + # Set defaults. + objectives = objectives or [Objective()] + algorithm = algorithm or RandomSearch() + trial_config = trial_config or TrialConfig() + + # Iterate over search space to build the following values: + # experiment.spec.parameters to define distribution and feasible space. + # experiment.spec.trialTemplate.trialParameters to reference parameters in Trials. + # Trainer function arguments for the appropriate substitution. + parameters_spec = [] + trial_parameters = [] + if trial_template.trainer.func_args is None: + trial_template.trainer.func_args = {} + + for param_name, param_spec in search_space.items(): + param_spec.name = param_name + parameters_spec.append(param_spec) + + trial_parameters.append( + models.V1beta1TrialParameterSpec( + name=param_name, + reference=param_name, + ) + ) + + trial_template.trainer.func_args[param_name] = f"${{trialParameters.{param_name}}}" + + # Build the Experiment. + experiment = models.V1beta1Experiment( + apiVersion=constants.API_VERSION, + kind=constants.EXPERIMENT_KIND, + metadata=models.IoK8sApimachineryPkgApisMetaV1ObjectMeta(name=optimization_job_name), + spec=models.V1beta1ExperimentSpec( + # Trial template and parameters. 
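+                # Each search-space parameter is referenced via a ${trialParameters.<name>}
+                # placeholder in the trainer args above; Katib substitutes the sampled value
+                # for every Trial it creates.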
+ trialTemplate=models.V1beta1TrialTemplate( + retain=True, + primaryContainerName=trainer_constants.NODE, + trialParameters=trial_parameters, + trialSpec={ + "apiVersion": trainer_constants.API_VERSION, + "kind": trainer_constants.TRAINJOB_KIND, + "spec": self.trainer_backend._get_trainjob_spec( + runtime=trial_template.runtime, + trainer=trial_template.trainer, + initializer=trial_template.initializer, + ).to_dict(), + }, + ), + parameters=parameters_spec, + # Trial Configs. + maxTrialCount=trial_config.num_trials, + parallelTrialCount=trial_config.parallel_trials, + maxFailedTrialCount=trial_config.max_failed_trials, + # Objective specification. + objective=models.V1beta1ObjectiveSpec( + objectiveMetricName=objectives[0].metric, + type=objectives[0].direction.value, + additionalMetricNames=[obj.metric for obj in objectives[1:]] + if len(objectives) > 1 + else None, + ), + # Algorithm specification. + algorithm=algorithm._to_katib_spec(), + ), + ) + + # Create the Experiment. + try: + self.custom_api.create_namespaced_custom_object( + constants.GROUP, + constants.VERSION, + self.namespace, + constants.EXPERIMENT_PLURAL, + experiment.to_dict(), + ) + except multiprocessing.TimeoutError as e: + raise TimeoutError( + f"Timeout to create {constants.OPTIMIZATION_JOB_KIND}: " + f"{self.namespace}/{optimization_job_name}" + ) from e + except Exception as e: + raise RuntimeError( + f"Failed to create {constants.OPTIMIZATION_JOB_KIND}: " + f"{self.namespace}/{optimization_job_name}" + ) from e + + logger.debug( + f"{constants.OPTIMIZATION_JOB_KIND} {self.namespace}/{optimization_job_name} " + "has been created" + ) + + return optimization_job_name + + def list_jobs(self) -> list[OptimizationJob]: + """List of the created OptimizationJobs""" + result = [] + + try: + thread = self.custom_api.list_namespaced_custom_object( + constants.GROUP, + constants.VERSION, + self.namespace, + constants.EXPERIMENT_PLURAL, + async_req=True, + ) + + optimization_job_list = models.V1beta1ExperimentList.from_dict( + thread.get(common_constants.DEFAULT_TIMEOUT) + ) + + if not optimization_job_list: + return result + + for optimization_job in optimization_job_list.items: + result.append(self.__get_optimization_job_from_cr(optimization_job)) + + except multiprocessing.TimeoutError as e: + raise TimeoutError( + f"Timeout to list {constants.OPTIMIZATION_JOB_KIND}s in namespace: {self.namespace}" + ) from e + except Exception as e: + raise RuntimeError( + f"Failed to list {constants.OPTIMIZATION_JOB_KIND}s in namespace: {self.namespace}" + ) from e + + return result + + def get_job(self, name: str) -> OptimizationJob: + """Get the OptimizationJob object""" + + try: + thread = self.custom_api.get_namespaced_custom_object( + constants.GROUP, + constants.VERSION, + self.namespace, + constants.EXPERIMENT_PLURAL, + name, + async_req=True, + ) + + optimization_job = models.V1beta1Experiment.from_dict( + thread.get(common_constants.DEFAULT_TIMEOUT) # type: ignore + ) + + except multiprocessing.TimeoutError as e: + raise TimeoutError( + f"Timeout to get {constants.OPTIMIZATION_JOB_KIND}: {self.namespace}/{name}" + ) from e + except Exception as e: + raise RuntimeError( + f"Failed to get {constants.OPTIMIZATION_JOB_KIND}: {self.namespace}/{name}" + ) from e + + return self.__get_optimization_job_from_cr(optimization_job) # type: ignore + + def delete_job(self, name: str): + """Delete the OptimizationJob""" + + try: + self.custom_api.delete_namespaced_custom_object( + constants.GROUP, + constants.VERSION, + self.namespace, + 
constants.EXPERIMENT_PLURAL, + name=name, + ) + except multiprocessing.TimeoutError as e: + raise TimeoutError( + f"Timeout to delete {constants.OPTIMIZATION_JOB_KIND}: {self.namespace}/{name}" + ) from e + except Exception as e: + raise RuntimeError( + f"Failed to delete {constants.OPTIMIZATION_JOB_KIND}: {self.namespace}/{name}" + ) from e + + logger.debug(f"{constants.OPTIMIZATION_JOB_KIND} {self.namespace}/{name} has been deleted") + + def __get_optimization_job_from_cr( + self, + optimization_job_cr: models.V1beta1Experiment, + ) -> OptimizationJob: + if not ( + optimization_job_cr.metadata + and optimization_job_cr.metadata.name + and optimization_job_cr.metadata.namespace + and optimization_job_cr.spec + and optimization_job_cr.spec.parameters + and optimization_job_cr.spec.objective + and optimization_job_cr.spec.algorithm + and optimization_job_cr.spec.max_trial_count + and optimization_job_cr.spec.parallel_trial_count + and optimization_job_cr.metadata.creation_timestamp + ): + raise Exception( + f"{constants.OPTIMIZATION_JOB_KIND} CR is invalid: {optimization_job_cr}" + ) + + optimization_job = OptimizationJob( + name=optimization_job_cr.metadata.name, + search_space=utils.get_search_space_from_katib_spec( + optimization_job_cr.spec.parameters + ), + objectives=utils.get_objectives_from_katib_spec(optimization_job_cr.spec.objective), + algorithm=utils.get_algorithm_from_katib_spec(optimization_job_cr.spec.algorithm), + trial_config=TrialConfig( + num_trials=optimization_job_cr.spec.max_trial_count, + parallel_trials=optimization_job_cr.spec.parallel_trial_count, + max_failed_trials=optimization_job_cr.spec.max_failed_trial_count, + ), + trials=self.__get_trials_from_job(optimization_job_cr.metadata.name), + creation_timestamp=optimization_job_cr.metadata.creation_timestamp, + status=constants.OPTIMIZATION_JOB_CREATED, # The default OptimizationJob status. + ) + + # Update the OptimizationJob status from Experiment conditions. + if optimization_job_cr.status and optimization_job_cr.status.conditions: + for c in optimization_job_cr.status.conditions: + if c.type == constants.EXPERIMENT_SUCCEEDED and c.status == "True": + optimization_job.status = constants.OPTIMIZATION_JOB_COMPLETE + elif c.type == constants.OPTIMIZATION_JOB_FAILED and c.status == "True": + optimization_job.status = constants.OPTIMIZATION_JOB_FAILED + else: + for trial in optimization_job.trials: + if trial.trainjob.status == trainer_constants.TRAINJOB_RUNNING: + optimization_job.status = constants.OPTIMIZATION_JOB_RUNNING + + return optimization_job + + def __get_trials_from_job(self, optimization_job_name: str) -> list[Trial]: + result = [] + try: + thread = self.custom_api.list_namespaced_custom_object( + constants.GROUP, + constants.VERSION, + self.namespace, + constants.TRIAL_PLURAL, + label_selector=f"{constants.EXPERIMENT_LABEL}={optimization_job_name}", + async_req=True, + ) + + trial_list = models.V1beta1TrialList.from_dict( + thread.get(common_constants.DEFAULT_TIMEOUT) + ) + + if not trial_list: + return result + + for t in trial_list.items: + if not (t.metadata and t.metadata.name and t.spec and t.spec.parameter_assignments): + raise ValueError(f"{constants.TRIAL_KIND} CR is invalid: {t}") + + # Trial name is equal to the TrainJob name. 
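+                # This lets us fetch the underlying TrainJob for the Trial directly from the
+                # Trainer backend below.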
+ trial = Trial( + name=t.metadata.name, + parameters={ + pa.name: pa.value + for pa in t.spec.parameter_assignments + if pa.name is not None and pa.value is not None + }, + trainjob=self.trainer_backend.get_job(name=t.metadata.name), + ) + if t.status and t.status.observation and t.status.observation.metrics: + trial.metrics = [ + Metric(name=m.name, latest=m.latest, max=m.max, min=m.min) + for m in t.status.observation.metrics + if m.name is not None + and m.latest is not None + and m.max is not None + and m.min is not None + ] + + result.append(trial) + + except multiprocessing.TimeoutError as e: + raise TimeoutError( + f"Timeout to list {constants.TRIAL_KIND}s in namespace: {self.namespace}" + ) from e + except Exception as e: + raise RuntimeError( + f"Failed to list {constants.TRIAL_KIND}s in namespace: {self.namespace}" + ) from e + + return result diff --git a/kubeflow/optimizer/backends/kubernetes/utils.py b/kubeflow/optimizer/backends/kubernetes/utils.py new file mode 100644 index 000000000..1682ecbe7 --- /dev/null +++ b/kubeflow/optimizer/backends/kubernetes/utils.py @@ -0,0 +1,112 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import fields +from typing import Any, Optional, Union, get_args, get_origin + +from kubeflow_katib_api import models + +from kubeflow.optimizer.constants import constants +from kubeflow.optimizer.types.algorithm_types import ( + ALGORITHM_REGISTRY, + GridSearch, + RandomSearch, +) +from kubeflow.optimizer.types.optimization_types import Direction, Objective +from kubeflow.optimizer.types.search_types import ( + CategoricalSearchSpace, + ContinuousSearchSpace, + Distribution, +) + + +def convert_value(raw_value: str, target_type: Any): + origin = get_origin(target_type) + args = get_args(target_type) + + if origin is Optional: + target_type = args[0] + + if target_type is int: + return int(raw_value) + elif target_type is float: + return float(raw_value) + elif target_type is bool: + return raw_value.lower() in ("True", "1") + return raw_value + + +def get_algorithm_from_katib_spec( + algorithm: models.V1beta1AlgorithmSpec, +) -> Union[GridSearch, RandomSearch]: + alg_cls = ALGORITHM_REGISTRY.get(algorithm.algorithm_name or "") + + if alg_cls is None: + raise ValueError(f"Kubeflow SDK doesn't support {algorithm.algorithm_name} algorithm.") + + kwargs = {} + settings = {s.name: s.value for s in algorithm.algorithm_settings or []} + + for f in fields(alg_cls): + raw_value = settings.get(f.name) + if raw_value is None: + continue + + if f.name in settings: + kwargs[f.name] = convert_value(raw_value, f.type) + + return alg_cls(**kwargs) + + +def get_objectives_from_katib_spec(objective: models.V1beta1ObjectiveSpec) -> list[Objective]: + if objective.objective_metric_name is None: + raise ValueError("Objective metric name cannot be empty") + + # TODO (andreyvelich): Katib doesn't support multi-objective optimization. + # Currently, the first metric is objective, and the rest is additional metrics. 
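+    # Every returned Objective therefore shares the direction of the Katib objective.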
+ direction = Direction(objective.type) + metrics = [objective.objective_metric_name] + (objective.additional_metric_names or []) + + return [Objective(metric=m, direction=direction) for m in metrics] + + +def get_search_space_from_katib_spec( + parameters: list[models.V1beta1ParameterSpec], +) -> dict[str, Union[ContinuousSearchSpace, CategoricalSearchSpace]]: + search_space = {} + + for p in parameters: + if p.parameter_type == constants.CATEGORICAL_PARAMETERS: + if not (p.feasible_space and p.feasible_space.list): + raise ValueError(f"Katib categorical parameters are invalid: {parameters}") + + search_space[p.name] = CategoricalSearchSpace( + choices=[str(v) for v in p.feasible_space.list] + ) + else: + if not ( + p.feasible_space + and p.feasible_space.min + and p.feasible_space.max + and p.feasible_space.distribution + ): + raise ValueError(f"Katib continuous parameters are invalid: {parameters}") + + search_space[p.name] = ContinuousSearchSpace( + min=float(p.feasible_space.min), + max=float(p.feasible_space.max), + distribution=Distribution(p.feasible_space.distribution), + ) + + return search_space diff --git a/kubeflow/optimizer/constants/__init__.py b/kubeflow/optimizer/constants/__init__.py new file mode 100644 index 000000000..48e3dcfaa --- /dev/null +++ b/kubeflow/optimizer/constants/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/kubeflow/optimizer/constants/constants.py b/kubeflow/optimizer/constants/constants.py new file mode 100644 index 000000000..d3d42b85d --- /dev/null +++ b/kubeflow/optimizer/constants/constants.py @@ -0,0 +1,56 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# Common constants. +GROUP = "kubeflow.org" +VERSION = "v1beta1" +API_VERSION = f"{GROUP}/{VERSION}" + +# The Kind name for the Experiment. +EXPERIMENT_KIND = "Experiment" + +# The plural for the Experiment. +EXPERIMENT_PLURAL = "experiments" + +# The succeeded condition for the Experiment. +EXPERIMENT_SUCCEEDED = "Succeeded" + +# Label to identify Experiment's resources. +EXPERIMENT_LABEL = "katib.kubeflow.org/experiment" + +# The plural for the Trials. +TRIAL_PLURAL = "trials" + +# The Kind name for the Trials. +TRIAL_KIND = "Trial" + +# The Kind name for the OptimizationJob. +OPTIMIZATION_JOB_KIND = "OptimizationJob" + +# The default status for the OptimizationJob once users create it. 
+OPTIMIZATION_JOB_CREATED = "Created" + +# The running status of the OptimizationJob, defined when at least one TrainJob is running. +OPTIMIZATION_JOB_RUNNING = "Running" + +# The complete status of the OptimizationJob, defined when Experiment CR has succeeded condition. +OPTIMIZATION_JOB_COMPLETE = "Complete" + +# The failed status of the OptimizationJob, defined when Experiment CR has failed condition. +OPTIMIZATION_JOB_FAILED = "Failed" + +# Katib search space parameter types. +DOUBLE_PARAMETER = "double" +CATEGORICAL_PARAMETERS = "categorical" diff --git a/kubeflow/optimizer/types/__init__.py b/kubeflow/optimizer/types/__init__.py new file mode 100644 index 000000000..48e3dcfaa --- /dev/null +++ b/kubeflow/optimizer/types/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/kubeflow/optimizer/types/algorithm_types.py b/kubeflow/optimizer/types/algorithm_types.py new file mode 100644 index 000000000..753fee3db --- /dev/null +++ b/kubeflow/optimizer/types/algorithm_types.py @@ -0,0 +1,87 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import abc +from dataclasses import dataclass, fields +from typing import Any, Optional + +from kubeflow_katib_api import models + + +def algorithm_to_katib_spec(obj: Any) -> models.V1beta1AlgorithmSpec: + """Convert any dataclass-based algorithm to a Katib AlgorithmSpec""" + settings = [] + for f in fields(obj): + value = getattr(obj, f.name) + if value is not None: + settings.append( + models.V1beta1AlgorithmSetting( + name=f.name, + value=str(value), + ) + ) + + return models.V1beta1AlgorithmSpec( + algorithmName=obj.algorithm_name, + algorithmSettings=settings or None, + ) + + +# Base implementation for the search algorithm. +class BaseAlgorithm(abc.ABC): + @property + @abc.abstractmethod + def algorithm_name(self) -> str: + pass + + @abc.abstractmethod + def _to_katib_spec(self): + raise NotImplementedError() + + +@dataclass +class GridSearch(BaseAlgorithm): + """Grid search algorithm.""" + + @property + def algorithm_name(self) -> str: + return "grid" + + def _to_katib_spec(self): + return algorithm_to_katib_spec(self) + + +@dataclass +class RandomSearch(BaseAlgorithm): + """Random search algorithm. + + Args: + random_state (`Optional[int]`): Random seed for reproducibility. 
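+
+    Example:
+
+        RandomSearch(random_state=42)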
+ """ + + random_state: Optional[int] = None + + @property + def algorithm_name(self) -> str: + return "random" + + def _to_katib_spec(self): + return algorithm_to_katib_spec(self) + + +# Registry of supported search algorithms. +ALGORITHM_REGISTRY = { + GridSearch().algorithm_name: GridSearch, + RandomSearch().algorithm_name: RandomSearch, +} diff --git a/kubeflow/optimizer/types/optimization_types.py b/kubeflow/optimizer/types/optimization_types.py new file mode 100644 index 000000000..2ecf64e43 --- /dev/null +++ b/kubeflow/optimizer/types/optimization_types.py @@ -0,0 +1,119 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from typing import Optional, Union + +import kubeflow.common.constants as common_constants +from kubeflow.optimizer.types.algorithm_types import GridSearch, RandomSearch +from kubeflow.optimizer.types.search_types import CategoricalSearchSpace, ContinuousSearchSpace +from kubeflow.trainer.types.types import TrainJob + + +# Direction for optimization objective +class Direction(Enum): + """Direction for optimization objective.""" + + MAXIMIZE = "maximize" + MINIMIZE = "minimize" + + +# Configuration for the objective metric +@dataclass +class Objective: + """Objective configuration for hyperparameter optimization. + + Args: + metric (`str`): The name of the metric to optimize. Defaults to "loss". + direction (`Direction`): Whether to maximize or minimize the metric. Defaults to "minimize". + """ + + metric: str = "loss" + direction: Direction = Direction.MINIMIZE + + def __post_init__(self): + if isinstance(self.direction, str): + self.direction = Direction(self.direction) + + +# Configuration for trial execution +@dataclass +class TrialConfig: + """Trial configuration for hyperparameter optimization. + + Args: + num_trials (`int`): Number of trials to run. Defaults to 10. + parallel_trials (`int`): Number of trials to run in parallel. Defaults to 1. + max_failed_trials (`Optional[int]`): Maximum number of failed trials before stopping. + """ + + num_trials: int = 10 + parallel_trials: int = 1 + max_failed_trials: Optional[int] = None + + +@dataclass +class Metric: + name: str + min: str + max: str + latest: str + + +# Representation of the single trial +@dataclass +class Trial: + """Representation for a trial. + + Args: + name (`str`): The name of the Trial. + parameters (`dict[str, str]`): Hyperparameters assigned to this Trial. + metrics (`list[Metric]`): Observed metrics for this Trial. The metrics are collected + only for completed Trials. + trainjob (`TrainJob`): Representation of the TrainJob + """ + + name: str + parameters: dict[str, str] + trainjob: TrainJob + metrics: list[Metric] = field(default_factory=list) + + +# Representation for the OptimizationJob +@dataclass +class OptimizationJob: + """Representation for an optimization job. + + Args: + name (`str`): The name of the OptimizationJob. 
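+        search_space (`dict[str, Union[ContinuousSearchSpace, CategoricalSearchSpace]]`): The
+            search space the job explores, keyed by parameter name.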
+ objectives (`list[Objective]`): The objective configuration. Currently, only the + first metric defined in the objectives list is optimized. Any additional metrics are + collected and displayed in the Trial results. + algorithm (`RandomSearch`): The algorithm configuration. + trial_config (`TrialConfig`): The trial configuration. + trials (`list[Trial]`): The list of created Trials. + creation_timestamp (`datetime`): The creation timestamp. + status (`str`): The current status of the optimization job. + """ + + name: str + search_space: dict[str, Union[ContinuousSearchSpace, CategoricalSearchSpace]] + objectives: list[Objective] + algorithm: Union[GridSearch, RandomSearch] + trial_config: TrialConfig + trials: list[Trial] + creation_timestamp: datetime + status: str = common_constants.UNKNOWN diff --git a/kubeflow/optimizer/types/search_types.py b/kubeflow/optimizer/types/search_types.py new file mode 100644 index 000000000..77fe25002 --- /dev/null +++ b/kubeflow/optimizer/types/search_types.py @@ -0,0 +1,95 @@ +# Copyright 2025 The Kubeflow Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from enum import Enum +from typing import Union + +from kubeflow_katib_api import models as katib_models + +import kubeflow.optimizer.constants.constants as constants + + +# Search space distribution helpers +class Search: + """Helper class for defining search space parameters.""" + + @staticmethod + def uniform(min: float, max: float) -> katib_models.V1beta1ParameterSpec: + """Sample a float value uniformly between `min` and `max`. + + Args: + min: Lower boundary for the float value. + max: Upper boundary for the float value. + + Returns: + Katib ParameterSpec object. + """ + return katib_models.V1beta1ParameterSpec( + parameterType=constants.DOUBLE_PARAMETER, + feasibleSpace=katib_models.V1beta1FeasibleSpace( + min=str(min), max=str(max), distribution=Distribution.UNIFORM.value + ), + ) + + @staticmethod + def loguniform(min: float, max: float) -> katib_models.V1beta1ParameterSpec: + """Sample a float value with log-uniform distribution between `min` and `max`. + + Args: + min: Lower boundary for the float value. + max: Upper boundary for the float value. + + Returns: + Katib ParameterSpec object. + """ + return katib_models.V1beta1ParameterSpec( + parameterType=constants.DOUBLE_PARAMETER, + feasibleSpace=katib_models.V1beta1FeasibleSpace( + min=str(min), max=str(max), distribution=Distribution.LOG_UNIFORM.value + ), + ) + + @staticmethod + def choice(values: list) -> katib_models.V1beta1ParameterSpec: + """Sample a categorical value from the list. + + Args: + values: List of categorical values. + + Returns: + Katib ParameterSpec object. + """ + return katib_models.V1beta1ParameterSpec( + parameterType=constants.CATEGORICAL_PARAMETERS, + feasibleSpace=katib_models.V1beta1FeasibleSpace(list=[str(v) for v in values]), + ) + + +# Distribution for the search space. 
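+# The enum values match the Katib FeasibleSpace distribution names.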
+class Distribution(Enum): + UNIFORM = "uniform" + LOG_UNIFORM = "logUniform" + + +@dataclass +class ContinuousSearchSpace: + min: Union[float, int] + max: Union[float, int] + distribution: Distribution + + +@dataclass +class CategoricalSearchSpace: + choices: list diff --git a/kubeflow/trainer/__init__.py b/kubeflow/trainer/__init__.py index 7caebc2d4..1fce8e0f5 100644 --- a/kubeflow/trainer/__init__.py +++ b/kubeflow/trainer/__init__.py @@ -13,15 +13,15 @@ # limitations under the License. -# Import the Kubeflow Trainer client. -from kubeflow.trainer.api.trainer_client import TrainerClient # noqa: F401 +# Import common types. +from kubeflow.common.types import KubernetesBackendConfig -# import backends and its associated configs -from kubeflow.trainer.backends.kubernetes.types import KubernetesBackendConfig +# Import the Kubeflow Trainer client. +from kubeflow.trainer.api.trainer_client import TrainerClient from kubeflow.trainer.backends.localprocess.types import LocalProcessBackendConfig # Import the Kubeflow Trainer constants. -from kubeflow.trainer.constants.constants import DATASET_PATH, MODEL_PATH # noqa: F401 +from kubeflow.trainer.constants.constants import DATASET_PATH, MODEL_PATH # Import the Kubeflow Trainer types. from kubeflow.trainer.types.types import ( @@ -40,6 +40,7 @@ TorchTuneConfig, TorchTuneInstructDataset, TrainerType, + TrainJobTemplate, ) __all__ = [ @@ -59,6 +60,7 @@ "TorchTuneConfig", "TorchTuneInstructDataset", "RuntimeTrainer", + "TrainJobTemplate", "TrainerClient", "TrainerType", "LocalProcessBackendConfig", diff --git a/kubeflow/trainer/api/trainer_client.py b/kubeflow/trainer/api/trainer_client.py index 6b564c90a..079b3fa55 100644 --- a/kubeflow/trainer/api/trainer_client.py +++ b/kubeflow/trainer/api/trainer_client.py @@ -16,8 +16,8 @@ import logging from typing import Optional, Union +from kubeflow.common.types import KubernetesBackendConfig from kubeflow.trainer.backends.kubernetes.backend import KubernetesBackend -from kubeflow.trainer.backends.kubernetes.types import KubernetesBackendConfig from kubeflow.trainer.backends.localprocess.backend import ( LocalProcessBackend, LocalProcessBackendConfig, @@ -31,7 +31,7 @@ class TrainerClient: def __init__( self, - backend_config: Union[KubernetesBackendConfig, LocalProcessBackendConfig] = None, + backend_config: Optional[Union[KubernetesBackendConfig, LocalProcessBackendConfig]] = None, ): """Initialize a Kubeflow Trainer client. @@ -44,7 +44,7 @@ def __init__( ValueError: Invalid backend configuration. """ - # initialize training backend + # Set the default backend config. 
if not backend_config: backend_config = KubernetesBackendConfig() diff --git a/kubeflow/trainer/backends/base.py b/kubeflow/trainer/backends/base.py index 0316b7b61..300708ee5 100644 --- a/kubeflow/trainer/backends/base.py +++ b/kubeflow/trainer/backends/base.py @@ -20,7 +20,7 @@ from kubeflow.trainer.types import types -class ExecutionBackend(abc.ABC): +class RuntimeBackend(abc.ABC): @abc.abstractmethod def list_runtimes(self) -> list[types.Runtime]: raise NotImplementedError() diff --git a/kubeflow/trainer/backends/kubernetes/backend.py b/kubeflow/trainer/backends/kubernetes/backend.py index 4310182bb..908223d98 100644 --- a/kubeflow/trainer/backends/kubernetes/backend.py +++ b/kubeflow/trainer/backends/kubernetes/backend.py @@ -26,27 +26,26 @@ from kubeflow_trainer_api import models from kubernetes import client, config, watch -from kubeflow.trainer.backends.base import ExecutionBackend -from kubeflow.trainer.backends.kubernetes import types as k8s_types +import kubeflow.common.constants as common_constants +from kubeflow.common.types import KubernetesBackendConfig +import kubeflow.common.utils as common_utils +from kubeflow.trainer.backends.base import RuntimeBackend +import kubeflow.trainer.backends.kubernetes.utils as utils from kubeflow.trainer.constants import constants from kubeflow.trainer.types import types -from kubeflow.trainer.utils import utils logger = logging.getLogger(__name__) -class KubernetesBackend(ExecutionBackend): - def __init__( - self, - cfg: k8s_types.KubernetesBackendConfig, - ): +class KubernetesBackend(RuntimeBackend): + def __init__(self, cfg: KubernetesBackendConfig): if cfg.namespace is None: - cfg.namespace = utils.get_default_target_namespace(cfg.context) + cfg.namespace = common_utils.get_default_target_namespace(cfg.context) # If client configuration is not set, use kube-config to access Kubernetes APIs. if cfg.client_configuration is None: # Load kube-config or in-cluster config. - if cfg.config_file or not utils.is_running_in_k8s(): + if cfg.config_file or not common_utils.is_running_in_k8s(): config.load_kube_config(config_file=cfg.config_file, context=cfg.context) else: config.load_incluster_config() @@ -68,7 +67,7 @@ def list_runtimes(self) -> list[types.Runtime]: ) runtime_list = models.TrainerV1alpha1ClusterTrainingRuntimeList.from_dict( - thread.get(constants.DEFAULT_TIMEOUT) + thread.get(common_constants.DEFAULT_TIMEOUT) ) if not runtime_list: @@ -85,7 +84,7 @@ def list_runtimes(self) -> list[types.Runtime]: f"{constants.RUNTIME_FRAMEWORK_LABEL} label." 
) continue - result.append(self.__get_runtime_from_crd(runtime)) + result.append(self.__get_runtime_from_cr(runtime)) except multiprocessing.TimeoutError as e: raise TimeoutError( @@ -113,7 +112,7 @@ def get_runtime(self, name: str) -> types.Runtime: ) runtime = models.TrainerV1alpha1ClusterTrainingRuntime.from_dict( - thread.get(constants.DEFAULT_TIMEOUT) # type: ignore + thread.get(common_constants.DEFAULT_TIMEOUT) # type: ignore ) except multiprocessing.TimeoutError as e: @@ -127,7 +126,7 @@ def get_runtime(self, name: str) -> types.Runtime: f"{self.namespace}/{name}" ) from e - return self.__get_runtime_from_crd(runtime) # type: ignore + return self.__get_runtime_from_cr(runtime) # type: ignore def get_runtime_packages(self, runtime: types.Runtime): if runtime.trainer.trainer_type == types.TrainerType.BUILTIN_TRAINER: @@ -184,53 +183,15 @@ def train( initializer: Optional[types.Initializer] = None, trainer: Optional[Union[types.CustomTrainer, types.BuiltinTrainer]] = None, ) -> str: - if runtime is None: - runtime = self.get_runtime(constants.TORCH_RUNTIME) - # Generate unique name for the TrainJob. - # TODO (andreyvelich): Discuss this TrainJob name generation. train_job_name = random.choice(string.ascii_lowercase) + uuid.uuid4().hex[:11] - # Build the Trainer. - trainer_crd = models.TrainerV1alpha1Trainer() - - if trainer: - # If users choose to use a custom training function. - if isinstance(trainer, types.CustomTrainer): - if runtime.trainer.trainer_type != types.TrainerType.CUSTOM_TRAINER: - raise ValueError(f"CustomTrainer can't be used with {runtime} runtime") - trainer_crd = utils.get_trainer_crd_from_custom_trainer(runtime, trainer) - - # If users choose to use a builtin trainer for post-training. - elif isinstance(trainer, types.BuiltinTrainer): - if runtime.trainer.trainer_type != types.TrainerType.BUILTIN_TRAINER: - raise ValueError(f"BuiltinTrainer can't be used with {runtime} runtime") - trainer_crd = utils.get_trainer_crd_from_builtin_trainer( - runtime, trainer, initializer - ) - - else: - raise ValueError( - f"The trainer type {type(trainer)} is not supported. " - "Please use CustomTrainer or BuiltinTrainer." - ) - + # Build the TrainJob. train_job = models.TrainerV1alpha1TrainJob( apiVersion=constants.API_VERSION, kind=constants.TRAINJOB_KIND, metadata=models.IoK8sApimachineryPkgApisMetaV1ObjectMeta(name=train_job_name), - spec=models.TrainerV1alpha1TrainJobSpec( - runtimeRef=models.TrainerV1alpha1RuntimeRef(name=runtime.name), - trainer=(trainer_crd if trainer_crd != models.TrainerV1alpha1Trainer() else None), - initializer=( - models.TrainerV1alpha1Initializer( - dataset=utils.get_dataset_initializer(initializer.dataset), - model=utils.get_model_initializer(initializer.model), - ) - if isinstance(initializer, types.Initializer) - else None - ), - ), + spec=self._get_trainjob_spec(runtime, initializer, trainer), ) # Create the TrainJob. 
@@ -269,7 +230,7 @@ def list_jobs(self, runtime: Optional[types.Runtime] = None) -> list[types.Train ) trainjob_list = models.TrainerV1alpha1TrainJobList.from_dict( - thread.get(constants.DEFAULT_TIMEOUT) + thread.get(common_constants.DEFAULT_TIMEOUT) ) if not trainjob_list: @@ -285,7 +246,7 @@ def list_jobs(self, runtime: Optional[types.Runtime] = None) -> list[types.Train ): continue - result.append(self.__get_trainjob_from_crd(trainjob)) + result.append(self.__get_trainjob_from_cr(trainjob)) except multiprocessing.TimeoutError as e: raise TimeoutError( @@ -312,7 +273,7 @@ def get_job(self, name: str) -> types.TrainJob: ) trainjob = models.TrainerV1alpha1TrainJob.from_dict( - thread.get(constants.DEFAULT_TIMEOUT) # type: ignore + thread.get(common_constants.DEFAULT_TIMEOUT) # type: ignore ) except multiprocessing.TimeoutError as e: @@ -324,7 +285,7 @@ def get_job(self, name: str) -> types.TrainJob: f"Failed to get {constants.TRAINJOB_KIND}: {self.namespace}/{name}" ) from e - return self.__get_trainjob_from_crd(trainjob) # type: ignore + return self.__get_trainjob_from_cr(trainjob) # type: ignore def get_job_logs( self, @@ -355,7 +316,7 @@ def get_job_logs( ) # Stream logs incrementally. - yield from log_stream + yield from log_stream # type: ignore else: logs = self.core_api.read_namespaced_pod_log( name=pod_name, @@ -431,66 +392,66 @@ def delete_job(self, name: str): logger.debug(f"{constants.TRAINJOB_KIND} {self.namespace}/{name} has been deleted") - def __get_runtime_from_crd( + def __get_runtime_from_cr( self, - runtime_crd: models.TrainerV1alpha1ClusterTrainingRuntime, + runtime_cr: models.TrainerV1alpha1ClusterTrainingRuntime, ) -> types.Runtime: if not ( - runtime_crd.metadata - and runtime_crd.metadata.name - and runtime_crd.spec - and runtime_crd.spec.ml_policy - and runtime_crd.spec.template.spec - and runtime_crd.spec.template.spec.replicated_jobs + runtime_cr.metadata + and runtime_cr.metadata.name + and runtime_cr.spec + and runtime_cr.spec.ml_policy + and runtime_cr.spec.template.spec + and runtime_cr.spec.template.spec.replicated_jobs ): - raise Exception(f"ClusterTrainingRuntime CRD is invalid: {runtime_crd}") + raise Exception(f"ClusterTrainingRuntime CR is invalid: {runtime_cr}") if not ( - runtime_crd.metadata.labels - and constants.RUNTIME_FRAMEWORK_LABEL in runtime_crd.metadata.labels + runtime_cr.metadata.labels + and constants.RUNTIME_FRAMEWORK_LABEL in runtime_cr.metadata.labels ): raise Exception( - f"Runtime {runtime_crd.metadata.name} must have " + f"Runtime {runtime_cr.metadata.name} must have " f"{constants.RUNTIME_FRAMEWORK_LABEL} label" ) return types.Runtime( - name=runtime_crd.metadata.name, + name=runtime_cr.metadata.name, trainer=utils.get_runtime_trainer( - runtime_crd.metadata.labels[constants.RUNTIME_FRAMEWORK_LABEL], - runtime_crd.spec.template.spec.replicated_jobs, - runtime_crd.spec.ml_policy, + runtime_cr.metadata.labels[constants.RUNTIME_FRAMEWORK_LABEL], + runtime_cr.spec.template.spec.replicated_jobs, + runtime_cr.spec.ml_policy, ), ) - def __get_trainjob_from_crd( + def __get_trainjob_from_cr( self, - trainjob_crd: models.TrainerV1alpha1TrainJob, + trainjob_cr: models.TrainerV1alpha1TrainJob, ) -> types.TrainJob: if not ( - trainjob_crd.metadata - and trainjob_crd.metadata.name - and trainjob_crd.metadata.namespace - and trainjob_crd.spec - and trainjob_crd.metadata.creation_timestamp + trainjob_cr.metadata + and trainjob_cr.metadata.name + and trainjob_cr.metadata.namespace + and trainjob_cr.spec + and 
trainjob_cr.metadata.creation_timestamp ): - raise Exception(f"TrainJob CRD is invalid: {trainjob_crd}") + raise Exception(f"TrainJob CR is invalid: {trainjob_cr}") - name = trainjob_crd.metadata.name - namespace = trainjob_crd.metadata.namespace + name = trainjob_cr.metadata.name + namespace = trainjob_cr.metadata.namespace - runtime = self.get_runtime(trainjob_crd.spec.runtime_ref.name) + runtime = self.get_runtime(trainjob_cr.spec.runtime_ref.name) - # Construct the TrainJob from the CRD. + # Construct the TrainJob from the CR. trainjob = types.TrainJob( name=name, - creation_timestamp=trainjob_crd.metadata.creation_timestamp, + creation_timestamp=trainjob_cr.metadata.creation_timestamp, runtime=runtime, steps=[], # Number of nodes is taken from TrainJob or TrainingRuntime num_nodes=( - trainjob_crd.spec.trainer.num_nodes - if trainjob_crd.spec.trainer and trainjob_crd.spec.trainer.num_nodes + trainjob_cr.spec.trainer.num_nodes + if trainjob_cr.spec.trainer and trainjob_cr.spec.trainer.num_nodes else runtime.trainer.num_nodes ), status=constants.TRAINJOB_CREATED, # The default TrainJob status. @@ -502,7 +463,7 @@ def __get_trainjob_from_crd( namespace, label_selector=constants.POD_LABEL_SELECTOR.format(trainjob_name=name), async_req=True, - ).get(constants.DEFAULT_TIMEOUT) + ).get(common_constants.DEFAULT_TIMEOUT) # Convert Pod to the correct format. pod_list = models.IoK8sApiCoreV1PodList.from_dict(response.to_dict()) @@ -552,8 +513,8 @@ def __get_trainjob_from_crd( ) from e # Update the TrainJob status from its conditions. - if trainjob_crd.status and trainjob_crd.status.conditions: - for c in trainjob_crd.status.conditions: + if trainjob_cr.status and trainjob_cr.status.conditions: + for c in trainjob_cr.status.conditions: if ( c.type == constants.TRAINJOB_COMPLETE and c.status == "True" @@ -578,3 +539,50 @@ def __get_trainjob_from_crd( trainjob.status = constants.TRAINJOB_RUNNING return trainjob + + def _get_trainjob_spec( + self, + runtime: Optional[types.Runtime] = None, + initializer: Optional[types.Initializer] = None, + trainer: Optional[Union[types.CustomTrainer, types.BuiltinTrainer]] = None, + ) -> models.TrainerV1alpha1TrainJobSpec: + """Get TrainJob spec from the given parameters""" + if runtime is None: + runtime = self.get_runtime(constants.TORCH_RUNTIME) + + # Build the Trainer. + trainer_cr = models.TrainerV1alpha1Trainer() + + if trainer: + # If users choose to use a custom training function. + if isinstance(trainer, types.CustomTrainer): + if runtime.trainer.trainer_type != types.TrainerType.CUSTOM_TRAINER: + raise ValueError(f"CustomTrainer can't be used with {runtime} runtime") + trainer_cr = utils.get_trainer_cr_from_custom_trainer(runtime, trainer) + + # If users choose to use a builtin trainer for post-training. + elif isinstance(trainer, types.BuiltinTrainer): + if runtime.trainer.trainer_type != types.TrainerType.BUILTIN_TRAINER: + raise ValueError(f"BuiltinTrainer can't be used with {runtime} runtime") + trainer_cr = utils.get_trainer_cr_from_builtin_trainer( + runtime, trainer, initializer + ) + + else: + raise ValueError( + f"The trainer type {type(trainer)} is not supported. " + "Please use CustomTrainer or BuiltinTrainer." 
+ ) + + return models.TrainerV1alpha1TrainJobSpec( + runtimeRef=models.TrainerV1alpha1RuntimeRef(name=runtime.name), + trainer=(trainer_cr if trainer_cr != models.TrainerV1alpha1Trainer() else None), + initializer=( + models.TrainerV1alpha1Initializer( + dataset=utils.get_dataset_initializer(initializer.dataset), + model=utils.get_model_initializer(initializer.model), + ) + if isinstance(initializer, types.Initializer) + else None + ), + ) diff --git a/kubeflow/trainer/backends/kubernetes/backend_test.py b/kubeflow/trainer/backends/kubernetes/backend_test.py index 85c71c461..162f70e78 100644 --- a/kubeflow/trainer/backends/kubernetes/backend_test.py +++ b/kubeflow/trainer/backends/kubernetes/backend_test.py @@ -31,8 +31,9 @@ from kubeflow_trainer_api import models import pytest +from kubeflow.common.types import KubernetesBackendConfig from kubeflow.trainer.backends.kubernetes.backend import KubernetesBackend -from kubeflow.trainer.backends.kubernetes.types import KubernetesBackendConfig +import kubeflow.trainer.backends.kubernetes.utils as utils from kubeflow.trainer.constants import constants from kubeflow.trainer.test.common import ( DEFAULT_NAMESPACE, @@ -43,7 +44,6 @@ TestCase, ) from kubeflow.trainer.types import types -from kubeflow.trainer.utils import utils # In all tests runtime name is equal to the framework name. TORCH_RUNTIME = "torch" diff --git a/kubeflow/trainer/utils/utils.py b/kubeflow/trainer/backends/kubernetes/utils.py similarity index 92% rename from kubeflow/trainer/utils/utils.py rename to kubeflow/trainer/backends/kubernetes/utils.py index 0cefd0467..ade5707cf 100644 --- a/kubeflow/trainer/utils/utils.py +++ b/kubeflow/trainer/backends/kubernetes/utils.py @@ -20,33 +20,11 @@ from urllib.parse import urlparse from kubeflow_trainer_api import models -from kubernetes import config from kubeflow.trainer.constants import constants from kubeflow.trainer.types import types -def is_running_in_k8s() -> bool: - return os.path.isdir("/var/run/secrets/kubernetes.io/") - - -def get_default_target_namespace(context: Optional[str] = None) -> str: - if not is_running_in_k8s(): - try: - all_contexts, current_context = config.list_kube_config_contexts() - # If context is set, we should get namespace from it. - if context: - for c in all_contexts: - if isinstance(c, dict) and c.get("name") == context: - return c["context"]["namespace"] - # Otherwise, try to get namespace from the current context. - return current_context["context"]["namespace"] - except Exception: - return constants.DEFAULT_NAMESPACE - with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace") as f: - return f.readline() - - def get_container_devices( resources: Optional[models.IoK8sApiCoreV1ResourceRequirements], ) -> Optional[tuple[str, str]]: @@ -366,26 +344,26 @@ def get_command_using_train_func( return command -def get_trainer_crd_from_custom_trainer( +def get_trainer_cr_from_custom_trainer( runtime: types.Runtime, trainer: types.CustomTrainer, ) -> models.TrainerV1alpha1Trainer: """ - Get the Trainer CRD from the custom trainer. + Get the Trainer CR from the custom trainer. """ - trainer_crd = models.TrainerV1alpha1Trainer() + trainer_cr = models.TrainerV1alpha1Trainer() # Add number of nodes to the Trainer. if trainer.num_nodes: - trainer_crd.num_nodes = trainer.num_nodes + trainer_cr.num_nodes = trainer.num_nodes # Add resources per node to the Trainer. 
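The test imports above now take `KubernetesBackendConfig` from `kubeflow.common.types`, and the Kubernetes helpers move from `kubeflow.trainer.utils` to live next to the backend. A rough sketch of the resulting import pattern; the backend constructor behaviour is assumed rather than shown in this diff:

```python
# Sketch of the post-refactor import locations (constructor details assumed).
from kubeflow.common.types import KubernetesBackendConfig
from kubeflow.trainer.backends.kubernetes.backend import KubernetesBackend
import kubeflow.trainer.backends.kubernetes.utils as utils  # was kubeflow.trainer.utils.utils
from kubeflow.trainer.constants import constants

# Instantiating the backend needs a reachable kubeconfig or in-cluster credentials.
backend = KubernetesBackend(KubernetesBackendConfig())
runtime = backend.get_runtime(constants.TORCH_RUNTIME)
print(runtime.name, runtime.trainer.framework)
```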
if trainer.resources_per_node: - trainer_crd.resources_per_node = get_resources_per_node(trainer.resources_per_node) + trainer_cr.resources_per_node = get_resources_per_node(trainer.resources_per_node) # Add command to the Trainer. # TODO: Support train function parameters. - trainer_crd.command = get_command_using_train_func( + trainer_cr.command = get_command_using_train_func( runtime, trainer.func, trainer.func_args, @@ -395,41 +373,41 @@ def get_trainer_crd_from_custom_trainer( # Add environment variables to the Trainer. if trainer.env: - trainer_crd.env = [ + trainer_cr.env = [ models.IoK8sApiCoreV1EnvVar(name=key, value=value) for key, value in trainer.env.items() ] - return trainer_crd + return trainer_cr -def get_trainer_crd_from_builtin_trainer( +def get_trainer_cr_from_builtin_trainer( runtime: types.Runtime, trainer: types.BuiltinTrainer, initializer: Optional[types.Initializer] = None, ) -> models.TrainerV1alpha1Trainer: """ - Get the Trainer CRD from the builtin trainer. + Get the Trainer CR from the builtin trainer. """ if not isinstance(trainer.config, types.TorchTuneConfig): raise ValueError(f"The BuiltinTrainer config is invalid: {trainer.config}") - trainer_crd = models.TrainerV1alpha1Trainer() + trainer_cr = models.TrainerV1alpha1Trainer() # Add number of nodes to the Trainer. if trainer.config.num_nodes: - trainer_crd.num_nodes = trainer.config.num_nodes + trainer_cr.num_nodes = trainer.config.num_nodes # Add resources per node to the Trainer. if trainer.config.resources_per_node: - trainer_crd.resources_per_node = get_resources_per_node(trainer.config.resources_per_node) + trainer_cr.resources_per_node = get_resources_per_node(trainer.config.resources_per_node) - trainer_crd.command = list(runtime.trainer.command) + trainer_cr.command = list(runtime.trainer.command) # Parse args in the TorchTuneConfig to the Trainer, preparing for the mutation of # the torchtune config in the runtime plugin. 
# Ref:https://github.com/kubeflow/trainer/tree/master/docs/proposals/2401-llm-trainer-v2 - trainer_crd.args = get_args_using_torchtune_config(trainer.config, initializer) + trainer_cr.args = get_args_using_torchtune_config(trainer.config, initializer) - return trainer_crd + return trainer_cr def get_args_using_torchtune_config( diff --git a/kubeflow/trainer/utils/utils_test.py b/kubeflow/trainer/backends/kubernetes/utils_test.py similarity index 99% rename from kubeflow/trainer/utils/utils_test.py rename to kubeflow/trainer/backends/kubernetes/utils_test.py index 441f9115d..835d8a62e 100644 --- a/kubeflow/trainer/utils/utils_test.py +++ b/kubeflow/trainer/backends/kubernetes/utils_test.py @@ -14,10 +14,10 @@ import pytest +import kubeflow.trainer.backends.kubernetes.utils as utils from kubeflow.trainer.constants import constants from kubeflow.trainer.test.common import FAILED, SUCCESS, TestCase from kubeflow.trainer.types import types -from kubeflow.trainer.utils import utils def _build_runtime() -> types.Runtime: diff --git a/kubeflow/trainer/backends/localprocess/backend.py b/kubeflow/trainer/backends/localprocess/backend.py index d10a5b10f..a2caf2202 100644 --- a/kubeflow/trainer/backends/localprocess/backend.py +++ b/kubeflow/trainer/backends/localprocess/backend.py @@ -20,7 +20,7 @@ from typing import Optional, Union import uuid -from kubeflow.trainer.backends.base import ExecutionBackend +from kubeflow.trainer.backends.base import RuntimeBackend from kubeflow.trainer.backends.localprocess import utils as local_utils from kubeflow.trainer.backends.localprocess.constants import local_runtimes from kubeflow.trainer.backends.localprocess.job import LocalJob @@ -35,7 +35,7 @@ logger = logging.getLogger(__name__) -class LocalProcessBackend(ExecutionBackend): +class LocalProcessBackend(RuntimeBackend): def __init__( self, cfg: LocalProcessBackendConfig, @@ -142,7 +142,7 @@ def list_jobs(self, runtime: Optional[types.Runtime] = None) -> list[types.Train ) return result - def get_job(self, name: str) -> Optional[types.TrainJob]: + def get_job(self, name: str) -> types.TrainJob: _job = next((j for j in self.__local_jobs if j.name == name), None) if _job is None: raise ValueError(f"No TrainJob with name {name}") diff --git a/kubeflow/trainer/backends/localprocess/constants.py b/kubeflow/trainer/backends/localprocess/constants.py index c7205cc68..962b73726 100644 --- a/kubeflow/trainer/backends/localprocess/constants.py +++ b/kubeflow/trainer/backends/localprocess/constants.py @@ -15,6 +15,7 @@ import re import textwrap +import kubeflow.common.constants as common_constants from kubeflow.trainer.backends.localprocess import types from kubeflow.trainer.constants import constants from kubeflow.trainer.types import types as base_types @@ -28,8 +29,8 @@ trainer_type=base_types.TrainerType.CUSTOM_TRAINER, framework=TORCH_FRAMEWORK_TYPE, num_nodes=1, - device_count=constants.UNKNOWN, - device=constants.UNKNOWN, + device_count=common_constants.UNKNOWN, + device=common_constants.UNKNOWN, packages=["torch"], ), ) diff --git a/kubeflow/trainer/constants/constants.py b/kubeflow/trainer/constants/constants.py index 015498f9a..5356bc5a2 100644 --- a/kubeflow/trainer/constants/constants.py +++ b/kubeflow/trainer/constants/constants.py @@ -15,17 +15,11 @@ import os import textwrap -# How long to wait in seconds for requests to the Kubernetes API Server. -DEFAULT_TIMEOUT = 120 - # Common constants. 
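`DEFAULT_TIMEOUT` and `UNKNOWN` (together with the default namespace) are removed from `kubeflow.trainer.constants` here; as the `common_constants` usages elsewhere in this diff show, those shared values are now read from `kubeflow.common.constants`. A small sketch of the split, limited to names this diff actually references:

```python
# Shared values come from kubeflow.common.constants, trainer-specific ones stay put.
import kubeflow.common.constants as common_constants
from kubeflow.trainer.constants import constants

print(common_constants.DEFAULT_TIMEOUT)  # Kubernetes API timeout shared across backends
print(common_constants.UNKNOWN)          # placeholder device / status value
print(constants.TRAINJOB_KIND)           # trainer-specific constant, unchanged location
```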
GROUP = "trainer.kubeflow.org" VERSION = "v1alpha1" API_VERSION = f"{GROUP}/{VERSION}" -# The default Kubernetes namespace. -DEFAULT_NAMESPACE = "default" - # The Kind name for the ClusterTrainingRuntime. CLUSTER_TRAINING_RUNTIME_KIND = "ClusterTrainingRuntime" @@ -51,7 +45,7 @@ # The failed status of the TrainJob, defined when TrainJob CR has failed condition. TRAINJOB_FAILED = "Failed" -# The succeeded phase of the Pod. +# The succeeded phase of the Pods. POD_SUCCEEDED = "Succeeded" # The label key to identify the relationship between TrainJob and Pod template in the runtime. @@ -89,9 +83,6 @@ # single VM where distributed training code is executed. NODE = "node" -# Unknown indicates that the value can't be identified. -UNKNOWN = "Unknown" - # The label for cpu in the container resources. CPU_LABEL = "cpu" diff --git a/kubeflow/trainer/types/types.py b/kubeflow/trainer/types/types.py index c32c44834..bcba4c078 100644 --- a/kubeflow/trainer/types/types.py +++ b/kubeflow/trainer/types/types.py @@ -18,6 +18,7 @@ from enum import Enum from typing import Callable, Optional, Union +import kubeflow.common.constants as common_constants from kubeflow.trainer.constants import constants @@ -212,8 +213,8 @@ class RuntimeTrainer: trainer_type: TrainerType framework: str num_nodes: int = 1 # The default value is set in the APIs. - device: str = constants.UNKNOWN - device_count: str = constants.UNKNOWN + device: str = common_constants.UNKNOWN + device_count: str = common_constants.UNKNOWN __command: tuple[str, ...] = field(init=False, repr=False) @property @@ -238,20 +239,19 @@ class Step: name: str status: Optional[str] pod_name: str - device: str = constants.UNKNOWN - device_count: str = constants.UNKNOWN + device: str = common_constants.UNKNOWN + device_count: str = common_constants.UNKNOWN # Representation for the TrainJob. -# TODO (andreyvelich): Discuss what fields users want to get. @dataclass class TrainJob: name: str - creation_timestamp: datetime runtime: Runtime steps: list[Step] num_nodes: int - status: str = constants.UNKNOWN + creation_timestamp: datetime + status: str = common_constants.UNKNOWN # Configuration for the HuggingFace dataset initializer. @@ -330,3 +330,27 @@ class Initializer: dataset: Optional[Union[HuggingFaceDatasetInitializer, DataCacheInitializer]] = None model: Optional[HuggingFaceModelInitializer] = None + + +# TODO (andreyvelich): Add train() and optimize() methods to this class. +@dataclass +class TrainJobTemplate: + """TrainJob template configuration. + + Args: + trainer (`CustomTrainer`): Configuration for a CustomTrainer. + runtime (`Optional[Runtime]`): Optional, reference to one of the existing runtimes. Defaults + to the torch-distributed runtime if not provided. + initializer (`Optional[Initializer]`): Optional configuration for the dataset and model + initializers. + """ + + trainer: CustomTrainer + runtime: Optional[Runtime] = None + initializer: Optional[Initializer] = None + + def keys(self): + return ["trainer", "runtime", "initializer"] + + def __getitem__(self, key): + return getattr(self, key) diff --git a/pyproject.toml b/pyproject.toml index 0f3072dcd..bcf11c1c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Education", "Intended Audience :: Science/Research", - # TODO (andreyvelich): Check Python version for Kubeflow Trainer. 
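The new `TrainJobTemplate` above defines `keys()` and `__getitem__`, which together make the dataclass behave like a mapping, so a stored template can be `**`-unpacked straight into a `train()` call. A brief sketch of that pattern; the `TrainerClient` import path is assumed:

```python
from kubeflow.trainer import TrainerClient  # import path assumed
from kubeflow.trainer.types import types


def train_func():
    print("training...")


template = types.TrainJobTemplate(trainer=types.CustomTrainer(func=train_func))

# keys() + __getitem__ make the template unpackable like a dict:
assert dict(**template) == {"trainer": template.trainer, "runtime": None, "initializer": None}

# ...so it can be forwarded directly into the client call:
job_name = TrainerClient().train(**template)
```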
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -30,6 +29,7 @@ dependencies = [ "kubernetes>=27.2.0", "pydantic>=2.10.0", "kubeflow-trainer-api>=2.0.0", + "kubeflow-katib-api>=0.19.0", ] [dependency-groups] @@ -38,6 +38,7 @@ dev = [ "pytest-mock>=3.10", "coverage>=7.0", "kubeflow_trainer_api@git+https://github.com/kubeflow/trainer.git@master#subdirectory=api/python_api", + "kubeflow_katib_api@git+https://github.com/kubeflow/katib.git@master#subdirectory=api/python_api", "ruff>=0.12.2", "pre-commit>=4.2.0", "PyGithub>=2.7.0", @@ -68,7 +69,12 @@ line-length = 100 target-version = "py39" src = ["kubeflow"] extend-exclude = [ - ".venv", "venv", "build", "dist", "__pycache__", "docs/_build" + ".venv", + "venv", + "build", + "dist", + "__pycache__", + "docs/_build", ] [tool.ruff.format] @@ -90,11 +96,11 @@ select = [ "N", # pep8-naming "B", # flake8-bugbear "C4", # flake8-comprehensions - "SIM" # flake8-simplify + "SIM", # flake8-simplify ] ignore = [ - "B006" # mutable-argument-default + "B006", # mutable-argument-default ] diff --git a/uv.lock b/uv.lock index fe6ae8445..37b6e544b 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.9" [[package]] @@ -36,66 +36,66 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, - { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, - { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, - { url = 
"https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, - { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, - { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 }, - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, - { url = 
"https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, - { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, - { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, - { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, - { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, - { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, - { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, - { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, - { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, - { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, - { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632 }, - { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", 
hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, - { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = 
"2024-09-04T20:43:45.256Z" }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" 
}, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220, upload-time = "2024-09-04T20:45:01.577Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605, upload-time = "2024-09-04T20:45:03.837Z" }, + { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910, upload-time = "2024-09-04T20:45:05.315Z" }, + { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200, upload-time = "2024-09-04T20:45:06.903Z" }, + { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565, upload-time = "2024-09-04T20:45:08.975Z" }, + { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635, upload-time = "2024-09-04T20:45:10.64Z" }, + { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218, upload-time = "2024-09-04T20:45:12.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486, upload-time = "2024-09-04T20:45:13.935Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911, upload-time = "2024-09-04T20:45:15.696Z" }, + { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632, upload-time = "2024-09-04T20:45:17.284Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820, upload-time = "2024-09-04T20:45:18.762Z" }, + { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290, upload-time = "2024-09-04T20:45:20.226Z" }, ] [[package]] @@ -292,44 +292,44 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702 }, - { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483 }, - { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679 }, - { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553 }, - { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499 }, - { url = 
"https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484 }, - { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281 }, - { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890 }, - { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247 }, - { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045 }, - { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923 }, - { url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805 }, - { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111 }, - { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169 }, - { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273 }, - { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211 }, - { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732 }, - { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655 }, - { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956 }, - { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859 }, - { url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254 }, - { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815 }, - { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147 }, - { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459 }, - { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812 }, - { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694 }, - { url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010 }, - { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377 }, - { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609 }, - { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156 }, - { url = 
"https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669 }, - { url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022 }, - { url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802 }, - { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706 }, - { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740 }, - { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874 }, +sdist = { url = "https://files.pythonhosted.org/packages/d6/0d/d13399c94234ee8f3df384819dc67e0c5ce215fb751d567a55a1f4b028c7/cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719", size = 744949, upload-time = "2025-08-05T23:59:27.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/29/2793d178d0eda1ca4a09a7c4e09a5185e75738cc6d526433e8663b460ea6/cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74", size = 7042702, upload-time = "2025-08-05T23:58:23.464Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b6/cabd07410f222f32c8d55486c464f432808abaa1f12af9afcbe8f2f19030/cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f", size = 4206483, upload-time = "2025-08-05T23:58:27.132Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9e/f9c7d36a38b1cfeb1cc74849aabe9bf817990f7603ff6eb485e0d70e0b27/cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf", size = 4429679, upload-time = "2025-08-05T23:58:29.152Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2a/4434c17eb32ef30b254b9e8b9830cee4e516f08b47fdd291c5b1255b8101/cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5", size = 4210553, upload-time = "2025-08-05T23:58:30.596Z" }, + { url = "https://files.pythonhosted.org/packages/ef/1d/09a5df8e0c4b7970f5d1f3aff1b640df6d4be28a64cae970d56c6cf1c772/cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2", size = 3894499, upload-time = "2025-08-05T23:58:32.03Z" }, + { url = "https://files.pythonhosted.org/packages/79/62/120842ab20d9150a9d3a6bdc07fe2870384e82f5266d41c53b08a3a96b34/cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08", size = 4458484, upload-time = "2025-08-05T23:58:33.526Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/1bc3634d45ddfed0871bfba52cf8f1ad724761662a0c792b97a951fb1b30/cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402", size = 4210281, upload-time = "2025-08-05T23:58:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fe/ffb12c2d83d0ee625f124880a1f023b5878f79da92e64c37962bbbe35f3f/cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42", size = 4456890, upload-time = "2025-08-05T23:58:36.923Z" }, + { url = "https://files.pythonhosted.org/packages/8c/8e/b3f3fe0dc82c77a0deb5f493b23311e09193f2268b77196ec0f7a36e3f3e/cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05", size = 4333247, upload-time = "2025-08-05T23:58:38.781Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a6/c3ef2ab9e334da27a1d7b56af4a2417d77e7806b2e0f90d6267ce120d2e4/cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453", size = 4565045, upload-time = "2025-08-05T23:58:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/31/c3/77722446b13fa71dddd820a5faab4ce6db49e7e0bf8312ef4192a3f78e2f/cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159", size = 2928923, upload-time = "2025-08-05T23:58:41.919Z" }, + { url = "https://files.pythonhosted.org/packages/38/63/a025c3225188a811b82932a4dcc8457a26c3729d81578ccecbcce2cb784e/cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec", size = 3403805, upload-time = "2025-08-05T23:58:43.792Z" }, + { url = "https://files.pythonhosted.org/packages/5b/af/bcfbea93a30809f126d51c074ee0fac5bd9d57d068edf56c2a73abedbea4/cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0", size = 7020111, upload-time = "2025-08-05T23:58:45.316Z" }, + { url = "https://files.pythonhosted.org/packages/98/c6/ea5173689e014f1a8470899cd5beeb358e22bb3cf5a876060f9d1ca78af4/cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394", size = 4198169, upload-time = "2025-08-05T23:58:47.121Z" }, + { url = "https://files.pythonhosted.org/packages/ba/73/b12995edc0c7e2311ffb57ebd3b351f6b268fed37d93bfc6f9856e01c473/cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9", size = 4421273, upload-time = "2025-08-05T23:58:48.557Z" }, + { url = "https://files.pythonhosted.org/packages/f7/6e/286894f6f71926bc0da67408c853dd9ba953f662dcb70993a59fd499f111/cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3", size = 4199211, upload-time = "2025-08-05T23:58:50.139Z" }, + { url = "https://files.pythonhosted.org/packages/de/34/a7f55e39b9623c5cb571d77a6a90387fe557908ffc44f6872f26ca8ae270/cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3", size = 3883732, upload-time = "2025-08-05T23:58:52.253Z" }, + { url = "https://files.pythonhosted.org/packages/f9/b9/c6d32edbcba0cd9f5df90f29ed46a65c4631c4fbe11187feb9169c6ff506/cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301", size = 4450655, upload-time = "2025-08-05T23:58:53.848Z" }, + { url = "https://files.pythonhosted.org/packages/77/2d/09b097adfdee0227cfd4c699b3375a842080f065bab9014248933497c3f9/cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5", size = 4198956, upload-time = "2025-08-05T23:58:55.209Z" }, + { url = "https://files.pythonhosted.org/packages/55/66/061ec6689207d54effdff535bbdf85cc380d32dd5377173085812565cf38/cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016", size = 4449859, upload-time = "2025-08-05T23:58:56.639Z" }, + { url = "https://files.pythonhosted.org/packages/41/ff/e7d5a2ad2d035e5a2af116e1a3adb4d8fcd0be92a18032917a089c6e5028/cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3", size = 4320254, upload-time = "2025-08-05T23:58:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/82/27/092d311af22095d288f4db89fcaebadfb2f28944f3d790a4cf51fe5ddaeb/cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9", size = 4554815, upload-time = "2025-08-05T23:59:00.283Z" }, + { url = "https://files.pythonhosted.org/packages/7e/01/aa2f4940262d588a8fdf4edabe4cda45854d00ebc6eaac12568b3a491a16/cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02", size = 2912147, upload-time = "2025-08-05T23:59:01.716Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bc/16e0276078c2de3ceef6b5a34b965f4436215efac45313df90d55f0ba2d2/cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b", size = 3390459, upload-time = "2025-08-05T23:59:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/56/d2/4482d97c948c029be08cb29854a91bd2ae8da7eb9c4152461f1244dcea70/cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012", size = 3576812, upload-time = "2025-08-05T23:59:04.833Z" }, + { url = "https://files.pythonhosted.org/packages/ec/24/55fc238fcaa122855442604b8badb2d442367dfbd5a7ca4bb0bd346e263a/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d", size = 4141694, upload-time = "2025-08-05T23:59:06.66Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/3ea4fa6fbe51baf3903806a0241c666b04c73d2358a3ecce09ebee8b9622/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d", size = 4375010, upload-time = "2025-08-05T23:59:08.14Z" }, + { url = "https://files.pythonhosted.org/packages/50/42/ec5a892d82d2a2c29f80fc19ced4ba669bca29f032faf6989609cff1f8dc/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da", size = 4141377, upload-time = "2025-08-05T23:59:09.584Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d7/246c4c973a22b9c2931999da953a2c19cae7c66b9154c2d62ffed811225e/cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db", size = 4374609, upload-time = "2025-08-05T23:59:11.923Z" }, + { url = "https://files.pythonhosted.org/packages/78/6d/c49ccf243f0a1b0781c2a8de8123ee552f0c8a417c6367a24d2ecb7c11b3/cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18", size = 3322156, upload-time = "2025-08-05T23:59:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/61/69/c252de4ec047ba2f567ecb53149410219577d408c2aea9c989acae7eafce/cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983", size = 3584669, upload-time = "2025-08-05T23:59:15.431Z" }, + { url = "https://files.pythonhosted.org/packages/e3/fe/deea71e9f310a31fe0a6bfee670955152128d309ea2d1c79e2a5ae0f0401/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427", size = 4153022, upload-time = "2025-08-05T23:59:16.954Z" }, + { url = "https://files.pythonhosted.org/packages/60/45/a77452f5e49cb580feedba6606d66ae7b82c128947aa754533b3d1bd44b0/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b", size = 4386802, upload-time = "2025-08-05T23:59:18.55Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b9/a2f747d2acd5e3075fdf5c145c7c3568895daaa38b3b0c960ef830db6cdc/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c", size = 4152706, upload-time = "2025-08-05T23:59:20.044Z" }, + { url = "https://files.pythonhosted.org/packages/81/ec/381b3e8d0685a3f3f304a382aa3dfce36af2d76467da0fd4bb21ddccc7b2/cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385", size = 4386740, upload-time = "2025-08-05T23:59:21.525Z" }, + { url = "https://files.pythonhosted.org/packages/0a/76/cf8d69da8d0b5ecb0db406f24a63a3f69ba5e791a11b782aeeefef27ccbb/cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043", size = 3331874, upload-time = "2025-08-05T23:59:23.017Z" }, ] [[package]] @@ -416,6 +416,7 @@ wheels = [ name = "kubeflow" source = { editable = "." 
}
 dependencies = [
+    { name = "kubeflow-katib-api" },
     { name = "kubeflow-trainer-api" },
     { name = "kubernetes" },
     { name = "pydantic" },
@@ -424,6 +425,7 @@ dependencies = [
 [package.dev-dependencies]
 dev = [
     { name = "coverage" },
+    { name = "kubeflow-katib-api" },
     { name = "kubeflow-trainer-api" },
     { name = "pre-commit" },
     { name = "pygithub" },
@@ -434,6 +436,7 @@ dev = [
 
 [package.metadata]
 requires-dist = [
+    { name = "kubeflow-katib-api", specifier = ">=0.19.0" },
     { name = "kubeflow-trainer-api", specifier = ">=2.0.0" },
     { name = "kubernetes", specifier = ">=27.2.0" },
     { name = "pydantic", specifier = ">=2.10.0" },
@@ -442,6 +445,7 @@ requires-dist = [
 [package.metadata.requires-dev]
 dev = [
     { name = "coverage", specifier = ">=7.0" },
+    { name = "kubeflow-katib-api", git = "https://github.com/kubeflow/katib.git?subdirectory=api%2Fpython_api&rev=master" },
     { name = "kubeflow-trainer-api", git = "https://github.com/kubeflow/trainer.git?subdirectory=api%2Fpython_api&rev=master" },
     { name = "pre-commit", specifier = ">=4.2.0" },
     { name = "pygithub", specifier = ">=2.7.0" },
@@ -450,6 +454,14 @@ dev = [
     { name = "ruff", specifier = ">=0.12.2" },
 ]
 
+[[package]]
+name = "kubeflow-katib-api"
+version = "0.19.0"
+source = { git = "https://github.com/kubeflow/katib.git?subdirectory=api%2Fpython_api&rev=master#38982e298d9c8d003d8964eaa067b71cb432fb7e" }
+dependencies = [
+    { name = "pydantic" },
+]
+
 [[package]]
 name = "kubeflow-trainer-api"
 version = "2.0.0"
@@ -566,9 +578,9 @@ wheels = [
 name = "pycparser"
 version = "2.22"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
+sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
+    { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
 ]
 
 [[package]]
@@ -706,9 +718,9 @@ dependencies = [
     { name = "typing-extensions" },
     { name = "urllib3" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/6a/a7/403e04aa96e2d94e1518d518d69718c2ba978c8d3ffa4ab3b101b94dbafa/pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb", size = 3707928 }
+sdist = { url = "https://files.pythonhosted.org/packages/6a/a7/403e04aa96e2d94e1518d518d69718c2ba978c8d3ffa4ab3b101b94dbafa/pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb", size = 3707928, upload-time = "2025-07-31T11:52:53.714Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/57/76/d768dd31322173b3956692b75471ac37bf3759c7abb603152f6a9b6594a8/pygithub-2.7.0-py3-none-any.whl", hash = 
"sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700", size = 416514 }, + { url = "https://files.pythonhosted.org/packages/57/76/d768dd31322173b3956692b75471ac37bf3759c7abb603152f6a9b6594a8/pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700", size = 416514, upload-time = "2025-07-31T11:52:51.909Z" }, ] [[package]] @@ -724,9 +736,9 @@ wheels = [ name = "pyjwt" version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] [package.optional-dependencies] @@ -741,17 +753,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920 }, - { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722 }, - { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087 }, - { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678 }, - { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660 }, - { url = 
"https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824 }, - { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912 }, - { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624 }, - { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141 }, +sdist = { url = "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", size = 3392854, upload-time = "2022-01-07T22:05:41.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", size = 349920, upload-time = "2022-01-07T22:05:49.156Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", size = 601722, upload-time = "2022-01-07T22:05:50.989Z" }, + { url = "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", size = 680087, upload-time = "2022-01-07T22:05:52.539Z" }, + { url = "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", size = 856678, upload-time = "2022-01-07T22:05:54.251Z" }, + { url = "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", size = 1133660, upload-time = "2022-01-07T22:05:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", size = 663824, upload-time = "2022-01-07T22:05:57.434Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", size = 1117912, upload-time = "2022-01-07T22:05:58.665Z" }, + { url = "https://files.pythonhosted.org/packages/25/2d/b7df6ddb0c2a33afdb358f8af6ea3b8c4d1196ca45497dd37a56f0c122be/PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543", size = 204624, upload-time = "2022-01-07T22:06:00.085Z" }, + { url = "https://files.pythonhosted.org/packages/5e/22/d3db169895faaf3e2eda892f005f433a62db2decbcfbc2f61e6517adfa87/PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93", size = 212141, upload-time = "2022-01-07T22:06:01.861Z" }, ] [[package]]