diff --git a/.github/actions/publish-from-template/action.yml b/.github/actions/publish-from-template/action.yml index 1e5117c47..8c45e3727 100644 --- a/.github/actions/publish-from-template/action.yml +++ b/.github/actions/publish-from-template/action.yml @@ -16,8 +16,7 @@ runs: shell: bash env: ${{ env }} run: - python .github/actions/publish-from-template/render_template.py ${{ - inputs.filename }} + python ${{ github.action_path }}/render_template.py ${{inputs.filename }} - uses: JasonEtco/create-an-issue@v2 # Only render template and create an issue in case the workflow is a scheduled one diff --git a/.github/workflows/kubernetes_test.yaml b/.github/workflows/kubernetes_test.yaml index d987066ec..ce34caa6c 100644 --- a/.github/workflows/kubernetes_test.yaml +++ b/.github/workflows/kubernetes_test.yaml @@ -177,6 +177,7 @@ jobs: pytest tests/tests_deployment/ -v -s - name: JupyterHub Notebook Tests + timeout-minutes: 2 # run jhub-client after pytest since jhubctl can cleanup # the running server run: | diff --git a/.github/workflows/test-provider.yaml b/.github/workflows/test-provider.yaml index f56717abd..3c0a3fa89 100644 --- a/.github/workflows/test-provider.yaml +++ b/.github/workflows/test-provider.yaml @@ -85,6 +85,8 @@ jobs: kv/data/repository/nebari-dev/nebari/azure/nebari-dev-ci/github-nebari-dev-repo-ci tenant_id | ARM_TENANT_ID; kv/data/repository/nebari-dev/nebari/azure/nebari-dev-ci/github-nebari-dev-repo-ci subscription_id | ARM_SUBSCRIPTION_ID; kv/data/repository/nebari-dev/nebari/shared_secrets DIGITALOCEAN_TOKEN | DIGITALOCEAN_TOKEN; + kv/data/repository/nebari-dev/nebari/shared_secrets SPACES_ACCESS_KEY_ID | SPACES_ACCESS_KEY_ID; + kv/data/repository/nebari-dev/nebari/shared_secrets SPACES_SECRET_ACCESS_KEY | SPACES_SECRET_ACCESS_KEY; - name: 'Authenticate to GCP' if: ${{ matrix.provider == 'gcp' }} @@ -123,7 +125,7 @@ jobs: - name: Nebari Initialize run: | - nebari init "${{ matrix.provider }}" --project "TestProvider" --domain "${{ matrix.provider }}.nebari.dev" --auth-provider github --disable-prompt --ci-provider ${{ matrix.cicd }} + nebari init "${{ matrix.provider }}" --project "TestProvider" --domain "${{ matrix.provider }}.nebari.dev" --auth-provider password --disable-prompt --ci-provider ${{ matrix.cicd }} cat "nebari-config.yaml" - name: Nebari Render diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4efaf1895..fd650b0d4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -29,7 +29,7 @@ repos: - id: check-json - id: check-yaml # jinja2 templates for helm charts - exclude: "src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/(clearml/chart/templates/.*|prefect/chart/templates/.*)" + exclude: "src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/(clearml/chart/templates/.*|prefect/chart/templates/.*)" args: [--allow-multiple-documents] - id: check-toml # Lint: Checks that non-binary executables have a proper shebang. 
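The check-yaml exclude is updated because the jinja2 helm-chart templates moved from src/_nebari/template/stages/07-kubernetes-services/... into the per-stage layout under src/_nebari/stages/kubernetes_services/template/. A small sanity check of the new pattern against a relocated template path (the concrete file name is hypothetical; pre-commit applies excludes as a regex search over the file path):

```python
import re

# New exclude pattern from .pre-commit-config.yaml above
EXCLUDE = (
    r"src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/"
    r"(clearml/chart/templates/.*|prefect/chart/templates/.*)"
)

# Hypothetical relocated jinja2 helm template that check-yaml should keep skipping
path = (
    "src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/"
    "clearml/chart/templates/deployment.yaml"
)

assert re.search(EXCLUDE, path) is not None
```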
diff --git a/pyproject.toml b/pyproject.toml index 3c9b11309..eebff1089 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,6 +54,7 @@ classifiers = [ ] dependencies = [ + "pluggy==1.0.0", "auth0-python==4.0.0", "azure-identity==1.12.0", "azure-mgmt-containerservice==19.1.0", @@ -96,7 +97,7 @@ Documentation = "https://www.nebari.dev/docs" Source = "https://github.com/nebari-dev/nebari" [project.scripts] -nebari = "_nebari.cli.main:app" +nebari = "nebari.__main__:main" [tool.ruff] select = [ diff --git a/pytest.ini b/pytest.ini index 89f5ec586..ee4e7f5cb 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,6 +6,8 @@ addopts = --tb=native # turn warnings into errors -Werror + # ignore deprecation warnings (TODO: filter further) + -W ignore::DeprecationWarning markers = conda: conda required to run this test (deselect with '-m \"not conda\"') aws: deploy on aws diff --git a/src/_nebari/cli.py b/src/_nebari/cli.py new file mode 100644 index 000000000..de91cc185 --- /dev/null +++ b/src/_nebari/cli.py @@ -0,0 +1,85 @@ +import sys +import typing + +import typer +from typer.core import TyperGroup + +from _nebari.version import __version__ +from nebari.plugins import nebari_plugin_manager + + +class OrderCommands(TyperGroup): + def list_commands(self, ctx: typer.Context): + """Return list of commands in the order they appear.""" + return list(self.commands) + + +def version_callback(value: bool): + if value: + typer.echo(__version__) + raise typer.Exit() + + +def exclude_stages(ctx: typer.Context, stages: typing.List[str]): + nebari_plugin_manager.excluded_stages = stages + return stages + + +def exclude_default_stages(ctx: typer.Context, exclude_default_stages: bool): + nebari_plugin_manager.exclude_default_stages = exclude_default_stages + return exclude_default_stages + + +def import_plugin(plugins: typing.List[str]): + try: + nebari_plugin_manager.load_plugins(plugins) + except ModuleNotFoundError as e: + typer.echo( + f"ERROR: Python module {e.name} not found. 
Make sure that the module is in your python path {sys.path}" + ) + typer.Exit() + return plugins + + +def create_cli(): + app = typer.Typer( + cls=OrderCommands, + help="Nebari CLI 🪴", + add_completion=False, + no_args_is_help=True, + rich_markup_mode="rich", + pretty_exceptions_show_locals=False, + context_settings={"help_option_names": ["-h", "--help"]}, + ) + + @app.callback() + def common( + ctx: typer.Context, + version: bool = typer.Option( + None, + "-V", + "--version", + help="Nebari version number", + callback=version_callback, + ), + plugins: typing.List[str] = typer.Option( + [], + "--import-plugin", + help="Import nebari plugin", + ), + excluded_stages: typing.List[str] = typer.Option( + [], + "--exclude-stage", + help="Exclude nebari stage(s) by name or regex", + ), + exclude_default_stages: bool = typer.Option( + False, + "--exclude-default-stages", + help="Exclude default nebari included stages", + ), + ): + pass + + nebari_plugin_manager.plugin_manager.hook.nebari_subcommand(cli=app) + + return app diff --git a/src/_nebari/cli/dev.py b/src/_nebari/cli/dev.py deleted file mode 100644 index 824125532..000000000 --- a/src/_nebari/cli/dev.py +++ /dev/null @@ -1,43 +0,0 @@ -import json -from pathlib import Path - -import typer - -from _nebari.keycloak import keycloak_rest_api_call - -app_dev = typer.Typer( - add_completion=False, - no_args_is_help=True, - rich_markup_mode="rich", - context_settings={"help_option_names": ["-h", "--help"]}, -) - - -@app_dev.command(name="keycloak-api") -def keycloak_api( - config_filename: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ), - request: str = typer.Option( - ..., - "-r", - "--request", - help="Send a REST API request, valid requests follow patterns found here: [green]keycloak.org/docs-api/15.0/rest-api[/green]", - ), -): - """ - Interact with the Keycloak REST API directly. - - This is an advanced tool which can have potentially destructive consequences. - Please use this at your own risk. - - """ - if isinstance(config_filename, str): - config_filename = Path(config_filename) - - r = keycloak_rest_api_call(config_filename, request=request) - - print(json.dumps(r, indent=4)) diff --git a/src/_nebari/cli/keycloak.py b/src/_nebari/cli/keycloak.py deleted file mode 100644 index 3b3511b3e..000000000 --- a/src/_nebari/cli/keycloak.py +++ /dev/null @@ -1,76 +0,0 @@ -import json -from pathlib import Path -from typing import Tuple - -import typer - -from _nebari.keycloak import do_keycloak, export_keycloak_users - -app_keycloak = typer.Typer( - add_completion=False, - no_args_is_help=True, - rich_markup_mode="rich", - context_settings={"help_option_names": ["-h", "--help"]}, -) - - -@app_keycloak.command(name="adduser") -def add_user( - add_users: Tuple[str, str] = typer.Option( - ..., "--user", help="Provide both: " - ), - config_filename: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ), -): - """Add a user to Keycloak. 
User will be automatically added to the [italic]analyst[/italic] group.""" - if isinstance(config_filename, str): - config_filename = Path(config_filename) - - args = ["adduser", add_users[0], add_users[1]] - - do_keycloak(config_filename, *args) - - -@app_keycloak.command(name="listusers") -def list_users( - config_filename: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ) -): - """List the users in Keycloak.""" - if isinstance(config_filename, str): - config_filename = Path(config_filename) - - args = ["listusers"] - - do_keycloak(config_filename, *args) - - -@app_keycloak.command(name="export-users") -def export_users( - config_filename: str = typer.Option( - ..., - "-c", - "--config", - help="nebari configuration file path", - ), - realm: str = typer.Option( - "nebari", - "--realm", - help="realm from which users are to be exported", - ), -): - """Export the users in Keycloak.""" - if isinstance(config_filename, str): - config_filename = Path(config_filename) - - r = export_keycloak_users(config_filename, realm=realm) - - print(json.dumps(r, indent=4)) diff --git a/src/_nebari/cli/main.py b/src/_nebari/cli/main.py deleted file mode 100644 index cbc0f03ef..000000000 --- a/src/_nebari/cli/main.py +++ /dev/null @@ -1,442 +0,0 @@ -from pathlib import Path -from typing import Optional -from zipfile import ZipFile - -import typer -from click import Context -from kubernetes import client -from kubernetes import config as kube_config -from rich import print -from ruamel import yaml -from typer.core import TyperGroup - -from _nebari.cli.dev import app_dev -from _nebari.cli.init import ( - check_auth_provider_creds, - check_cloud_provider_creds, - check_project_name, - check_ssl_cert_email, - enum_to_list, - guided_init_wizard, - handle_init, -) -from _nebari.cli.keycloak import app_keycloak -from _nebari.deploy import deploy_configuration -from _nebari.destroy import destroy_configuration -from _nebari.render import render_template -from _nebari.schema import ( - AuthenticationEnum, - CiEnum, - GitRepoEnum, - InitInputs, - ProviderEnum, - TerraformStateEnum, - verify, -) -from _nebari.upgrade import do_upgrade -from _nebari.utils import load_yaml -from _nebari.version import __version__ - -SECOND_COMMAND_GROUP_NAME = "Additional Commands" -GUIDED_INIT_MSG = ( - "[bold green]START HERE[/bold green] - this will guide you step-by-step " - "to generate your [purple]nebari-config.yaml[/purple]. " - "It is an [i]alternative[/i] to passing the options listed below." -) -KEYCLOAK_COMMAND_MSG = ( - "Interact with the Nebari Keycloak identity and access management tool." -) -DEV_COMMAND_MSG = "Development tools and advanced features." 
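With src/_nebari/cli/{dev,keycloak,main}.py removed, subcommands are no longer hard-wired into a single Typer app; they are contributed through the `nebari_subcommand` pluggy hook that `create_cli()` fires above, and extra plugins can be pulled in at runtime with `--import-plugin`. A minimal sketch of what such a plugin module might look like; the `hookimpl` marker and the pluggy project name "nebari" are assumptions here (a real plugin would import the marker from `nebari.hookspecs`, which this diff does not show):

```python
# my_nebari_plugin.py -- hypothetical module loaded via:
#   nebari --import-plugin my_nebari_plugin <command> ...
import pluggy
import typer

# Assumption: nebari registers its pluggy hooks under the project name "nebari";
# a real plugin would import the hookimpl marker from nebari.hookspecs instead.
hookimpl = pluggy.HookimplMarker("nebari")


@hookimpl
def nebari_subcommand(cli: typer.Typer):
    # `cli` is the top-level Typer app passed as `cli=app` in create_cli() above.
    @cli.command()
    def hello(name: str = typer.Option("world", "--name", help="who to greet")):
        """Toy subcommand contributed by a plugin."""
        typer.echo(f"Hello, {name}!")
```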
- - -def path_callback(value: str) -> Path: - return Path(value).expanduser().resolve() - - -def config_path_callback(value: str) -> Path: - value = path_callback(value) - if not value.is_file(): - raise ValueError(f"Passed configuration path {value} does not exist!") - return value - - -CONFIG_PATH_OPTION: Path = typer.Option( - ..., - "--config", - "-c", - help="nebari configuration yaml file path, please pass in as -c/--config flag", - callback=config_path_callback, -) - -OUTPUT_PATH_OPTION: Path = typer.Option( - Path.cwd(), - "-o", - "--output", - help="output directory", - callback=path_callback, -) - - -class OrderCommands(TyperGroup): - def list_commands(self, ctx: Context): - """Return list of commands in the order appear.""" - return list(self.commands) - - -app = typer.Typer( - cls=OrderCommands, - help="Nebari CLI 🪴", - add_completion=False, - no_args_is_help=True, - rich_markup_mode="rich", - context_settings={"help_option_names": ["-h", "--help"]}, -) -app.add_typer( - app_keycloak, - name="keycloak", - help=KEYCLOAK_COMMAND_MSG, - rich_help_panel=SECOND_COMMAND_GROUP_NAME, -) -app.add_typer( - app_dev, - name="dev", - help=DEV_COMMAND_MSG, - rich_help_panel=SECOND_COMMAND_GROUP_NAME, -) - - -@app.callback(invoke_without_command=True) -def version( - version: Optional[bool] = typer.Option( - None, - "-V", - "--version", - help="Nebari version number", - is_eager=True, - ), -): - if version: - print(__version__) - raise typer.Exit() - - -@app.command() -def init( - cloud_provider: str = typer.Argument( - "local", - help=f"options: {enum_to_list(ProviderEnum)}", - callback=check_cloud_provider_creds, - is_eager=True, - ), - # Although this unused below, the functionality is contained in the callback. Thus, - # this attribute cannot be removed. - guided_init: bool = typer.Option( - False, - help=GUIDED_INIT_MSG, - callback=guided_init_wizard, - is_eager=True, - ), - project_name: str = typer.Option( - ..., - "--project-name", - "--project", - "-p", - callback=check_project_name, - ), - domain_name: str = typer.Option( - ..., - "--domain-name", - "--domain", - "-d", - ), - namespace: str = typer.Option( - "dev", - ), - auth_provider: str = typer.Option( - "password", - help=f"options: {enum_to_list(AuthenticationEnum)}", - callback=check_auth_provider_creds, - ), - auth_auto_provision: bool = typer.Option( - False, - ), - repository: str = typer.Option( - None, - help=f"options: {enum_to_list(GitRepoEnum)}", - ), - repository_auto_provision: bool = typer.Option( - False, - ), - ci_provider: str = typer.Option( - None, - help=f"options: {enum_to_list(CiEnum)}", - ), - terraform_state: str = typer.Option( - "remote", help=f"options: {enum_to_list(TerraformStateEnum)}" - ), - kubernetes_version: str = typer.Option( - "latest", - ), - ssl_cert_email: str = typer.Option( - None, - callback=check_ssl_cert_email, - ), - disable_prompt: bool = typer.Option( - False, - is_eager=True, - ), -): - """ - Create and initialize your [purple]nebari-config.yaml[/purple] file. - - This command will create and initialize your [purple]nebari-config.yaml[/purple] :sparkles: - - This file contains all your Nebari cluster configuration details and, - is used as input to later commands such as [green]nebari render[/green], [green]nebari deploy[/green], etc. - - If you're new to Nebari, we recommend you use the Guided Init wizard. 
- To get started simply run: - - [green]nebari init --guided-init[/green] - - """ - inputs = InitInputs() - - inputs.cloud_provider = cloud_provider - inputs.project_name = project_name - inputs.domain_name = domain_name - inputs.namespace = namespace - inputs.auth_provider = auth_provider - inputs.auth_auto_provision = auth_auto_provision - inputs.repository = repository - inputs.repository_auto_provision = repository_auto_provision - inputs.ci_provider = ci_provider - inputs.terraform_state = terraform_state - inputs.kubernetes_version = kubernetes_version - inputs.ssl_cert_email = ssl_cert_email - inputs.disable_prompt = disable_prompt - - handle_init(inputs) - - -@app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) -def validate( - config_path=CONFIG_PATH_OPTION, - enable_commenting: bool = typer.Option( - False, "--enable-commenting", help="Toggle PR commenting on GitHub Actions" - ), -): - """ - Validate the values in the [purple]nebari-config.yaml[/purple] file are acceptable. - """ - config = load_yaml(config_path) - - if enable_commenting: - # for PR's only - # comment_on_pr(config) - pass - else: - verify(config) - print("[bold purple]Successfully validated configuration.[/bold purple]") - - -@app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) -def render( - output_path=OUTPUT_PATH_OPTION, - config_path=CONFIG_PATH_OPTION, - dry_run: bool = typer.Option( - False, - "--dry-run", - help="simulate rendering files without actually writing or updating any files", - ), -): - """ - Dynamically render the Terraform scripts and other files from your [purple]nebari-config.yaml[/purple] file. - """ - config = load_yaml(config_path) - - verify(config) - - render_template(output_path, config_path, dry_run=dry_run) - - -@app.command() -def deploy( - config_path=CONFIG_PATH_OPTION, - output_path=OUTPUT_PATH_OPTION, - dns_provider: str = typer.Option( - False, - "--dns-provider", - help="dns provider to use for registering domain name mapping", - ), - dns_auto_provision: bool = typer.Option( - False, - "--dns-auto-provision", - help="Attempt to automatically provision DNS, currently only available for `cloudflare`", - ), - disable_prompt: bool = typer.Option( - False, - "--disable-prompt", - help="Disable human intervention", - ), - disable_render: bool = typer.Option( - False, - "--disable-render", - help="Disable auto-rendering in deploy stage", - ), - disable_checks: bool = typer.Option( - False, - "--disable-checks", - help="Disable the checks performed after each stage", - ), - skip_remote_state_provision: bool = typer.Option( - False, - "--skip-remote-state-provision", - help="Skip terraform state deployment which is often required in CI once the terraform remote state bootstrapping phase is complete", - ), -): - """ - Deploy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file. 
- """ - config = load_yaml(config_path) - - verify(config) - - if not disable_render: - render_template(output_path, config_path) - - deploy_configuration( - config, - dns_provider=dns_provider, - dns_auto_provision=dns_auto_provision, - disable_prompt=disable_prompt, - disable_checks=disable_checks, - skip_remote_state_provision=skip_remote_state_provision, - ) - - -@app.command() -def destroy( - config_path=CONFIG_PATH_OPTION, - output_path=OUTPUT_PATH_OPTION, - disable_render: bool = typer.Option( - False, - "--disable-render", - help="Disable auto-rendering before destroy", - ), - disable_prompt: bool = typer.Option( - False, - "--disable-prompt", - help="Destroy entire Nebari cluster without confirmation request. Suggested for CI use.", - ), -): - """ - Destroy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file. - """ - - def _run_destroy(config_path=config_path, disable_render=disable_render): - config = load_yaml(config_path) - - verify(config) - - if not disable_render: - render_template(output_path, config_path) - - destroy_configuration(config) - - if disable_prompt: - _run_destroy() - elif typer.confirm("Are you sure you want to destroy your Nebari cluster?"): - _run_destroy() - else: - raise typer.Abort() - - -@app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) -def upgrade( - config_path=CONFIG_PATH_OPTION, - attempt_fixes: bool = typer.Option( - False, - "--attempt-fixes", - help="Attempt to fix the config for any incompatibilities between your old and new Nebari versions.", - ), -): - """ - Upgrade your [purple]nebari-config.yaml[/purple]. - - Upgrade your [purple]nebari-config.yaml[/purple] after an nebari upgrade. If necessary, prompts users to perform manual upgrade steps required for the deploy process. - - See the project [green]RELEASE.md[/green] for details. - """ - do_upgrade(config_path, attempt_fixes=attempt_fixes) - - -@app.command(rich_help_panel=SECOND_COMMAND_GROUP_NAME) -def support( - config_path=CONFIG_PATH_OPTION, - output_path=OUTPUT_PATH_OPTION, -): - """ - Support tool to write all Kubernetes logs locally and compress them into a zip file. - - The Nebari team recommends k9s to manage and inspect the state of the cluster. - However, this command occasionally helpful for debugging purposes should the logs need to be shared. 
- """ - kube_config.load_kube_config() - - v1 = client.CoreV1Api() - - namespace = get_config_namespace(config_path) - - pods = v1.list_namespaced_pod(namespace=namespace) - - for pod in pods.items: - Path(f"./log/{namespace}").mkdir(parents=True, exist_ok=True) - path = Path(f"./log/{namespace}/{pod.metadata.name}.txt") - with path.open(mode="wt") as file: - try: - file.write( - "%s\t%s\t%s\n" - % ( - pod.status.pod_ip, - namespace, - pod.metadata.name, - ) - ) - - # some pods are running multiple containers - containers = [ - _.name if len(pod.spec.containers) > 1 else None - for _ in pod.spec.containers - ] - - for container in containers: - if container is not None: - file.write(f"Container: {container}\n") - file.write( - v1.read_namespaced_pod_log( - name=pod.metadata.name, - namespace=namespace, - container=container, - ) - ) - - except client.exceptions.ApiException as e: - file.write("%s not available" % pod.metadata.name) - raise e - - with ZipFile(output_path, "w") as zip: - for file in list(Path(f"./log/{namespace}").glob("*.txt")): - print(file) - zip.write(file) - - -def get_config_namespace(config_path): - with open(config_path) as f: - config = yaml.safe_load(f.read()) - - return config["namespace"] - - -if __name__ == "__main__": - app() diff --git a/src/_nebari/config.py b/src/_nebari/config.py new file mode 100644 index 000000000..ca8b8db96 --- /dev/null +++ b/src/_nebari/config.py @@ -0,0 +1,119 @@ +import os +import pathlib +import re +import sys +import typing + +import pydantic + +from _nebari.utils import yaml + + +def set_nested_attribute(data: typing.Any, attrs: typing.List[str], value: typing.Any): + """Takes an arbitrary list of attributes and accesses the deeply + nested config object to set a value + + """ + + def _get_attr(d: typing.Any, attr: str): + if hasattr(d, "__getitem__"): + if re.fullmatch(r"\d+", attr): + try: + return d[int(attr)] + except Exception: + return d[attr] + else: + return d[attr] + else: + return getattr(d, attr) + + def _set_attr(d: typing.Any, attr: str, value: typing.Any): + if hasattr(d, "__getitem__"): + if re.fullmatch(r"\d+", attr): + try: + d[int(attr)] = value + except Exception: + d[attr] = value + else: + d[attr] = value + else: + return setattr(d, attr, value) + + data_pos = data + for attr in attrs[:-1]: + data_pos = _get_attr(data_pos, attr) + _set_attr(data_pos, attrs[-1], value) + + +def set_config_from_environment_variables( + config: pydantic.BaseModel, keyword: str = "NEBARI_SECRET", separator: str = "__" +): + """Setting nebari configuration values from environment variables + + For example `NEBARI_SECRET__ci_cd__branch=master` would set `ci_cd.branch = "master"` + """ + nebari_secrets = [_ for _ in os.environ if _.startswith(keyword + separator)] + for secret in nebari_secrets: + attrs = secret[len(keyword + separator) :].split(separator) + try: + set_nested_attribute(config, attrs, os.environ[secret]) + except Exception as e: + print( + f"FAILED: setting secret from environment variable={secret} due to the following error\n {e}" + ) + sys.exit(1) + return config + + +def read_configuration( + config_filename: pathlib.Path, + config_schema: pydantic.BaseModel, + read_environment: bool = True, +): + """Read configuration from multiple sources and apply validation""" + filename = pathlib.Path(config_filename) + + if not filename.is_file(): + raise ValueError( + f"passed in configuration filename={config_filename} does not exist" + ) + + with filename.open() as f: + config = config_schema(**yaml.load(f.read())) + + if read_environment: + config = 
set_config_from_environment_variables(config) + + return config + + +def write_configuration( + config_filename: pathlib.Path, + config: typing.Union[pydantic.BaseModel, typing.Dict], + mode: str = "w", +): + with config_filename.open(mode) as f: + if isinstance(config, pydantic.BaseModel): + yaml.dump(config.dict(), f) + else: + yaml.dump(config, f) + + +def backup_configuration(filename: pathlib.Path, extrasuffix: str = ""): + if not filename.exists(): + return + + # Backup old file + backup_filename = pathlib.Path(f"{filename}{extrasuffix}.backup") + + if backup_filename.exists(): + i = 1 + while True: + next_backup_filename = pathlib.Path(f"{backup_filename}~{i}") + if not next_backup_filename.exists(): + backup_filename = next_backup_filename + break + i = i + 1 + + filename.rename(backup_filename) + print(f"Backing up {filename} as {backup_filename}") diff --git a/src/_nebari/constants.py b/src/_nebari/constants.py index dc1b2d884..f4c1edcbf 100644 --- a/src/_nebari/constants.py +++ b/src/_nebari/constants.py @@ -5,7 +5,7 @@ # 04-kubernetes-ingress DEFAULT_TRAEFIK_IMAGE_TAG = "2.9.1" -HIGHEST_SUPPORTED_K8S_VERSION = "1.25.12" +HIGHEST_SUPPORTED_K8S_VERSION = ("1", "24", "16") DEFAULT_GKE_RELEASE_CHANNEL = "UNSPECIFIED" DEFAULT_NEBARI_DASK_VERSION = CURRENT_RELEASE @@ -15,3 +15,10 @@ DEFAULT_CONDA_STORE_IMAGE_TAG = "v0.4.14" LATEST_SUPPORTED_PYTHON_VERSION = "3.10" + + +# DOCS +DO_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-do" +AZURE_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-azure" +AWS_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-aws" +GCP_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-gcp" diff --git a/src/_nebari/deploy.py b/src/_nebari/deploy.py index 0dcd951af..394fe0cce 100644 --- a/src/_nebari/deploy.py +++ b/src/_nebari/deploy.py @@ -1,274 +1,25 @@ +import contextlib import logging -import subprocess +import pathlib import textwrap -from pathlib import Path +from typing import List -from _nebari.provider import terraform -from _nebari.provider.dns.cloudflare import update_record -from _nebari.stages import checks, input_vars, state_imports -from _nebari.utils import ( - check_cloud_credentials, - keycloak_provider_context, - kubernetes_provider_context, - timer, -) +from _nebari.utils import timer +from nebari import hookspecs, schema logger = logging.getLogger(__name__) -def provision_01_terraform_state(stage_outputs, config): - directory = Path("stages/01-terraform-state") - - if config["provider"] in {"existing", "local"}: - stage_outputs[directory] = {} - else: - stage_outputs[directory] = terraform.deploy( - terraform_import=True, - directory=directory / config["provider"], - input_vars=input_vars.stage_01_terraform_state(stage_outputs, config), - state_imports=state_imports.stage_01_terraform_state(stage_outputs, config), - ) - - -def provision_02_infrastructure(stage_outputs, config, disable_checks=False): - """Generalized method to provision infrastructure. - - After successful deployment the following properties are set on - `stage_outputs[directory]`. - - `kubernetes_credentials` which are sufficient credentials to - connect with the kubernetes provider - - `kubeconfig_filename` which is a path to a kubeconfig that can - be used to connect to a kubernetes cluster - - at least one node running such that resources in the - node_group.general can be scheduled - - At a high level this stage is expected to provision a kubernetes - cluster on a given provider. 
- """ - directory = "stages/02-infrastructure" - - stage_outputs[directory] = terraform.deploy( - Path(directory) / config["provider"], - input_vars=input_vars.stage_02_infrastructure(stage_outputs, config), - ) - - if not disable_checks: - checks.stage_02_infrastructure(stage_outputs, config) - - -def provision_03_kubernetes_initialize(stage_outputs, config, disable_checks=False): - directory = "stages/03-kubernetes-initialize" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_03_kubernetes_initialize(stage_outputs, config), - ) - - if not disable_checks: - checks.stage_03_kubernetes_initialize(stage_outputs, config) - - -def provision_04_kubernetes_ingress(stage_outputs, config, disable_checks=False): - directory = "stages/04-kubernetes-ingress" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_04_kubernetes_ingress(stage_outputs, config), - ) - - if not disable_checks: - checks.stage_04_kubernetes_ingress(stage_outputs, config) - - -def add_clearml_dns(zone_name, record_name, record_type, ip_or_hostname): - dns_records = [ - f"app.clearml.{record_name}", - f"api.clearml.{record_name}", - f"files.clearml.{record_name}", - ] - - for dns_record in dns_records: - update_record(zone_name, dns_record, record_type, ip_or_hostname) - - -def provision_ingress_dns( - stage_outputs, - config, - dns_provider: str, - dns_auto_provision: bool, - disable_prompt: bool = True, - disable_checks: bool = False, -): - directory = "stages/04-kubernetes-ingress" - - ip_or_name = stage_outputs[directory]["load_balancer_address"]["value"] - ip_or_hostname = ip_or_name["hostname"] or ip_or_name["ip"] - - if dns_auto_provision and dns_provider == "cloudflare": - record_name, zone_name = ( - config["domain"].split(".")[:-2], - config["domain"].split(".")[-2:], - ) - record_name = ".".join(record_name) - zone_name = ".".join(zone_name) - if config["provider"] in {"do", "gcp", "azure"}: - update_record(zone_name, record_name, "A", ip_or_hostname) - if config.get("clearml", {}).get("enabled"): - add_clearml_dns(zone_name, record_name, "A", ip_or_hostname) - - elif config["provider"] == "aws": - update_record(zone_name, record_name, "CNAME", ip_or_hostname) - if config.get("clearml", {}).get("enabled"): - add_clearml_dns(zone_name, record_name, "CNAME", ip_or_hostname) - else: - logger.info( - f"Couldn't update the DNS record for cloud provider: {config['provider']}" - ) - elif not disable_prompt: - input( - f"Take IP Address {ip_or_hostname} and update DNS to point to " - f'"{config["domain"]}" [Press Enter when Complete]' - ) - - if not disable_checks: - checks.check_ingress_dns(stage_outputs, config, disable_prompt) - - -def provision_05_kubernetes_keycloak(stage_outputs, config, disable_checks=False): - directory = "stages/05-kubernetes-keycloak" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_05_kubernetes_keycloak(stage_outputs, config), - ) - - if not disable_checks: - checks.stage_05_kubernetes_keycloak(stage_outputs, config) - - -def provision_06_kubernetes_keycloak_configuration( - stage_outputs, config, disable_checks=False -): - directory = "stages/06-kubernetes-keycloak-configuration" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_06_kubernetes_keycloak_configuration( - stage_outputs, config - ), - ) - - if not disable_checks: - checks.stage_06_kubernetes_keycloak_configuration(stage_outputs, 
config) - - -def provision_07_kubernetes_services(stage_outputs, config, disable_checks=False): - directory = "stages/07-kubernetes-services" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_07_kubernetes_services(stage_outputs, config), - ) - - if not disable_checks: - checks.stage_07_kubernetes_services(stage_outputs, config) - - -def provision_08_nebari_tf_extensions(stage_outputs, config, disable_checks=False): - directory = "stages/08-nebari-tf-extensions" - - stage_outputs[directory] = terraform.deploy( - directory=directory, - input_vars=input_vars.stage_08_nebari_tf_extensions(stage_outputs, config), - ) - - if not disable_checks: - pass - - -def guided_install( - config, - dns_provider, - dns_auto_provision, - disable_prompt=False, - disable_checks=False, - skip_remote_state_provision=False, -): - # 01 Check Environment Variables - check_cloud_credentials(config) - - stage_outputs = {} - if ( - config["provider"] not in {"existing", "local"} - and config["terraform_state"]["type"] == "remote" - ): - if skip_remote_state_provision: - print("Skipping remote state provision") - else: - provision_01_terraform_state(stage_outputs, config) - - provision_02_infrastructure(stage_outputs, config, disable_checks) - - with kubernetes_provider_context( - stage_outputs["stages/02-infrastructure"]["kubernetes_credentials"]["value"] - ): - provision_03_kubernetes_initialize(stage_outputs, config, disable_checks) - provision_04_kubernetes_ingress(stage_outputs, config, disable_checks) - provision_ingress_dns( - stage_outputs, - config, - dns_provider=dns_provider, - dns_auto_provision=dns_auto_provision, - disable_prompt=disable_prompt, - disable_checks=disable_checks, - ) - provision_05_kubernetes_keycloak(stage_outputs, config, disable_checks) - - with keycloak_provider_context( - stage_outputs["stages/05-kubernetes-keycloak"]["keycloak_credentials"][ - "value" - ] - ): - provision_06_kubernetes_keycloak_configuration( - stage_outputs, config, disable_checks - ) - provision_07_kubernetes_services(stage_outputs, config, disable_checks) - provision_08_nebari_tf_extensions(stage_outputs, config, disable_checks) - - print("Nebari deployed successfully") - - print("Services:") - for service_name, service in stage_outputs["stages/07-kubernetes-services"][ - "service_urls" - ]["value"].items(): - print(f" - {service_name} -> {service['url']}") - - print( - f"Kubernetes kubeconfig located at file://{stage_outputs['stages/02-infrastructure']['kubeconfig_filename']['value']}" - ) - username = "root" - password = ( - config.get("security", {}).get("keycloak", {}).get("initial_root_password", "") - ) - if password: - print(f"Kubecloak master realm username={username} password={password}") - - print( - "Additional administration docs can be found at https://docs.nebari.dev/en/stable/source/admin_guide/" - ) - return stage_outputs - - def deploy_configuration( - config, + config: schema.Main, + stages: List[hookspecs.NebariStage], dns_provider, dns_auto_provision, - disable_prompt, - disable_checks, - skip_remote_state_provision, + disable_prompt: bool = False, + disable_checks: bool = False, + skip_remote_state_provision: bool = False, ): - if config.get("prevent_deploy", False): - # Note if we used the Pydantic model properly, we might get that nebari_config.prevent_deploy always exists but defaults to False + if config.prevent_deploy: raise ValueError( textwrap.dedent( """ @@ -285,7 +36,12 @@ def deploy_configuration( ) ) - logger.info(f'All nebari 
endpoints will be under https://{config["domain"]}') + if config.domain is None: + logger.info( + "All nebari endpoints will be under kubernetes load balancer address which cannot be known before deployment" + ) + else: + logger.info(f"All nebari endpoints will be under https://{config.domain}") if disable_checks: logger.warning( @@ -293,16 +49,30 @@ def deploy_configuration( ) with timer(logger, "deploying Nebari"): - try: - return guided_install( - config, - dns_provider, - dns_auto_provision, - disable_prompt, - disable_checks, - skip_remote_state_provision, - ) - except subprocess.CalledProcessError as e: - logger.error("subprocess command failed") - logger.error(e.output) - raise e + stage_outputs = {} + with contextlib.ExitStack() as stack: + for stage in stages: + s = stage(output_directory=pathlib.Path.cwd(), config=config) + stack.enter_context(s.deploy(stage_outputs)) + + if not disable_checks: + s.check(stage_outputs) + print("Nebari deployed successfully") + + print("Services:") + for service_name, service in stage_outputs["stages/07-kubernetes-services"][ + "service_urls" + ]["value"].items(): + print(f" - {service_name} -> {service['url']}") + + print( + f"Kubernetes kubeconfig located at file://{stage_outputs['stages/02-infrastructure']['kubeconfig_filename']['value']}" + ) + username = "root" + password = config.security.keycloak.initial_root_password + if password: + print(f"Kubecloak master realm username={username} password={password}") + + print( + "Additional administration docs can be found at https://docs.nebari.dev/en/stable/source/admin_guide/" + ) diff --git a/src/_nebari/destroy.py b/src/_nebari/destroy.py index eefd3eefc..900ad8acf 100644 --- a/src/_nebari/destroy.py +++ b/src/_nebari/destroy.py @@ -1,185 +1,35 @@ -import functools +import contextlib import logging -from pathlib import Path +import pathlib +from typing import List -from _nebari.provider import terraform -from _nebari.stages import input_vars, state_imports -from _nebari.utils import ( - check_cloud_credentials, - keycloak_provider_context, - kubernetes_provider_context, - timer, -) +from _nebari.utils import timer +from nebari import hookspecs, schema logger = logging.getLogger(__name__) -def gather_stage_outputs(config): - stage_outputs = {} - - _terraform_init_output = functools.partial( - terraform.deploy, - terraform_init=True, - terraform_import=True, - terraform_apply=False, - terraform_destroy=False, - ) - - if ( - config["provider"] not in {"existing", "local"} - and config["terraform_state"]["type"] == "remote" - ): - stage_outputs["stages/01-terraform-state"] = _terraform_init_output( - directory=Path("stages/01-terraform-state") / config["provider"], - input_vars=input_vars.stage_01_terraform_state(stage_outputs, config), - state_imports=state_imports.stage_01_terraform_state(stage_outputs, config), - ) - - stage_outputs["stages/02-infrastructure"] = _terraform_init_output( - directory=Path("stages/02-infrastructure") / config["provider"], - input_vars=input_vars.stage_02_infrastructure(stage_outputs, config), - ) - - stage_outputs["stages/03-kubernetes-initialize"] = _terraform_init_output( - directory="stages/03-kubernetes-initialize", - input_vars=input_vars.stage_03_kubernetes_initialize(stage_outputs, config), - ) - - stage_outputs["stages/04-kubernetes-ingress"] = _terraform_init_output( - directory="stages/04-kubernetes-ingress", - input_vars=input_vars.stage_04_kubernetes_ingress(stage_outputs, config), - ) - - stage_outputs["stages/05-kubernetes-keycloak"] = 
_terraform_init_output( - directory="stages/05-kubernetes-keycloak", - input_vars=input_vars.stage_05_kubernetes_keycloak(stage_outputs, config), - ) - - stage_outputs[ - "stages/06-kubernetes-keycloak-configuration" - ] = _terraform_init_output( - directory="stages/06-kubernetes-keycloak-configuration", - input_vars=input_vars.stage_06_kubernetes_keycloak_configuration( - stage_outputs, config - ), - ) - - stage_outputs["stages/07-kubernetes-services"] = _terraform_init_output( - directory="stages/07-kubernetes-services", - input_vars=input_vars.stage_07_kubernetes_services(stage_outputs, config), - ) - - stage_outputs["stages/08-nebari-tf-extensions"] = _terraform_init_output( - directory="stages/08-nebari-tf-extensions", - input_vars=input_vars.stage_08_nebari_tf_extensions(stage_outputs, config), - ) - - return stage_outputs - - -def destroy_stages(stage_outputs, config): - def _terraform_destroy(ignore_errors=False, terraform_apply=False, **kwargs): - try: - terraform.deploy( - terraform_init=True, - terraform_import=True, - terraform_apply=terraform_apply, - terraform_destroy=True, - **kwargs, - ) - except terraform.TerraformException as e: - if not ignore_errors: - raise e - return False - return True - - status = {} - - with kubernetes_provider_context( - stage_outputs["stages/02-infrastructure"]["kubernetes_credentials"]["value"] - ): - with keycloak_provider_context( - stage_outputs["stages/05-kubernetes-keycloak"]["keycloak_credentials"][ - "value" - ] - ): - status["stages/08-nebari-tf-extensions"] = _terraform_destroy( - directory="stages/08-nebari-tf-extensions", - input_vars=input_vars.stage_08_nebari_tf_extensions( - stage_outputs, config - ), - ignore_errors=True, - ) - - status["stages/07-kubernetes-services"] = _terraform_destroy( - directory="stages/07-kubernetes-services", - input_vars=input_vars.stage_07_kubernetes_services( - stage_outputs, config - ), - ignore_errors=True, - ) - - status["stages/06-kubernetes-keycloak-configuration"] = _terraform_destroy( - directory="stages/06-kubernetes-keycloak-configuration", - input_vars=input_vars.stage_06_kubernetes_keycloak_configuration( - stage_outputs, config - ), - ignore_errors=True, - ) - - status["stages/05-kubernetes-keycloak"] = _terraform_destroy( - directory="stages/05-kubernetes-keycloak", - input_vars=input_vars.stage_05_kubernetes_keycloak(stage_outputs, config), - ignore_errors=True, - ) - - status["stages/04-kubernetes-ingress"] = _terraform_destroy( - directory="stages/04-kubernetes-ingress", - input_vars=input_vars.stage_04_kubernetes_ingress(stage_outputs, config), - ignore_errors=True, - ) - - status["stages/03-kubernetes-initialize"] = _terraform_destroy( - directory="stages/03-kubernetes-initialize", - input_vars=input_vars.stage_03_kubernetes_initialize(stage_outputs, config), - ignore_errors=True, - ) - - status["stages/02-infrastructure"] = _terraform_destroy( - directory=Path("stages/02-infrastructure") / config["provider"], - input_vars=input_vars.stage_02_infrastructure(stage_outputs, config), - ignore_errors=True, - ) - - if ( - config["provider"] not in {"existing", "local"} - and config["terraform_state"]["type"] == "remote" - ): - status["stages/01-terraform-state"] = _terraform_destroy( - # acl and force_destroy do not import properly - # and only get refreshed properly with an apply - terraform_apply=True, - directory=Path("stages/01-terraform-state") / config["provider"], - input_vars=input_vars.stage_01_terraform_state(stage_outputs, config), - ignore_errors=True, - ) - - return status - - 
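The hand-rolled gather_stage_outputs/destroy_stages pair above (like the deleted guided_install in deploy.py) is replaced below by iterating over stage classes whose deploy and destroy methods are context managers entered on a contextlib.ExitStack, so later stages run while earlier ones are still "open" and teardown unwinds in reverse order. A minimal sketch of that pattern with a toy stage standing in for a NebariStage (the toy class and its internals are assumptions, not the real hookspecs.NebariStage interface):

```python
import contextlib
import pathlib


class ToyStage:
    # Stand-in for a NebariStage: deploy/destroy yield after doing their work so
    # that later stages run while this stage's context is still active.
    name = "stages/99-toy"

    def __init__(self, output_directory: pathlib.Path, config=None):
        self.output_directory = output_directory
        self.config = config

    @contextlib.contextmanager
    def deploy(self, stage_outputs: dict):
        stage_outputs[self.name] = {"deployed": True}
        yield

    @contextlib.contextmanager
    def destroy(self, stage_outputs: dict, status: dict):
        status[self.name] = True
        yield

    def check(self, stage_outputs: dict):
        assert stage_outputs[self.name]["deployed"]


stage_outputs: dict = {}
with contextlib.ExitStack() as stack:
    for stage in [ToyStage]:
        s = stage(output_directory=pathlib.Path.cwd(), config=None)
        stack.enter_context(s.deploy(stage_outputs))
        s.check(stage_outputs)
```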
-def destroy_configuration(config): +def destroy_configuration(config: schema.Main, stages: List[hookspecs.NebariStage]): logger.info( """Removing all infrastructure, your local files will still remain, you can use 'nebari deploy' to re-install infrastructure using same config file\n""" ) - check_cloud_credentials(config) - # Populate stage_outputs to determine progress of deployment and - # get credentials to kubernetes and keycloak context - stage_outputs = gather_stage_outputs(config) + stage_outputs = {} + status = {} with timer(logger, "destroying Nebari"): - status = destroy_stages(stage_outputs, config) + with contextlib.ExitStack() as stack: + for stage in stages: + try: + s = stage(output_directory=pathlib.Path.cwd(), config=config) + stack.enter_context(s.destroy(stage_outputs, status)) + except Exception as e: + status[s.name] = False + print( + f"ERROR: stage={s.name} failed due to {e}. Due to stages depending on each other we can only destroy stages that occur before this stage" + ) + break for stage_name, success in status.items(): if not success: diff --git a/src/_nebari/initialize.py b/src/_nebari/initialize.py index d176c9e20..559ea5ae3 100644 --- a/src/_nebari/initialize.py +++ b/src/_nebari/initialize.py @@ -1,425 +1,102 @@ import logging import os -import random import re -import secrets -import string import tempfile from pathlib import Path +import pydantic import requests from _nebari.provider import git from _nebari.provider.cicd import github from _nebari.provider.oauth.auth0 import create_client -from _nebari.utils import ( - check_cloud_credentials, - namestr_regex, - set_docker_image_tag, - set_kubernetes_version, - set_nebari_dask_version, -) - -from .version import __version__ +from _nebari.stages.bootstrap import CiEnum +from _nebari.stages.kubernetes_ingress import CertificateEnum +from _nebari.stages.kubernetes_keycloak import AuthenticationEnum +from _nebari.stages.terraform_state import TerraformStateEnum +from _nebari.utils import random_secure_string +from _nebari.version import __version__ +from nebari.schema import ProviderEnum logger = logging.getLogger(__name__) WELCOME_HEADER_TEXT = "Your open source data science platform, hosted" -def base_configuration(): - nebari_image_tag = set_docker_image_tag() - return { - "project_name": None, - "provider": None, - "domain": None, - "certificate": { - "type": "self-signed", - }, - "security": { - "authentication": None, - }, - "default_images": { - "jupyterhub": f"quay.io/nebari/nebari-jupyterhub:{nebari_image_tag}", - "jupyterlab": f"quay.io/nebari/nebari-jupyterlab:{nebari_image_tag}", - "dask_worker": f"quay.io/nebari/nebari-dask-worker:{nebari_image_tag}", - }, - "storage": {"conda_store": "200Gi", "shared_filesystem": "200Gi"}, - "theme": { - "jupyterhub": { - "hub_title": None, - "hub_subtitle": None, - "welcome": None, - "logo": "https://raw.githubusercontent.com/nebari-dev/nebari-design/main/logo-mark/horizontal/Nebari-Logo-Horizontal-Lockup-White-text.svg", - "display_version": True, - } - }, - "helm_extensions": [], - "monitoring": { - "enabled": True, - }, - "argo_workflows": { - "enabled": True, - }, - "kbatch": { - "enabled": True, - }, - "cdsdashboards": { - "enabled": True, - "cds_hide_user_named_servers": True, - "cds_hide_user_dashboard_servers": False, - }, - } - - -def default_environments(): - nebari_dask_version = set_nebari_dask_version() - return { - "environment-dask.yaml": { - "name": "dask", - "channels": ["conda-forge"], - "dependencies": [ - "python=3.10.8", - "ipykernel=6.21.0", 
- "ipywidgets==7.7.1", - f"nebari-dask =={nebari_dask_version}", - "python-graphviz=0.20.1", - "pyarrow=10.0.1", - "s3fs=2023.1.0", - "gcsfs=2023.1.0", - "numpy=1.23.5", - "numba=0.56.4", - "pandas=1.5.3", - { - "pip": [ - "kbatch==0.4.1", - ], - }, - ], - }, - "environment-dashboard.yaml": { - "name": "dashboard", - "channels": ["conda-forge"], - "dependencies": [ - "python=3.10", - "cdsdashboards-singleuser=0.6.3", - "cufflinks-py=0.17.3", - "dash=2.8.1", - "geopandas=0.12.2", - "geopy=2.3.0", - "geoviews=1.9.6", - "gunicorn=20.1.0", - "holoviews=1.15.4", - "ipykernel=6.21.2", - "ipywidgets=8.0.4", - "jupyter=1.0.0", - "jupyterlab=3.6.1", - "jupyter_bokeh=3.0.5", - "matplotlib=3.7.0", - f"nebari-dask=={nebari_dask_version}", - "nodejs=18.12.1", - "numpy", - "openpyxl=3.1.1", - "pandas=1.5.3", - "panel=0.14.3", - "param=1.12.3", - "plotly=5.13.0", - "python-graphviz=0.20.1", - "rich=13.3.1", - "streamlit=1.9.0", - "sympy=1.11.1", - "voila=0.4.0", - "pip=23.0", - { - "pip": [ - "streamlit-image-comparison==0.0.3", - "noaa-coops==0.2.1", - "dash_core_components==2.0.0", - "dash_html_components==2.0.0", - ], - }, - ], - }, - } - - -def __getattr__(name): - if name == "nebari_image_tag": - return set_docker_image_tag() - elif name == "nebari_dask_version": - return set_nebari_dask_version() - elif name == "BASE_CONFIGURATION": - return base_configuration() - elif name == "DEFAULT_ENVIRONMENTS": - return default_environments() - - -CICD_CONFIGURATION = { - "type": "PLACEHOLDER", - "branch": "main", - "commit_render": True, -} - -AUTH_PASSWORD = { - "type": "password", -} - -AUTH_OAUTH_GITHUB = { - "type": "GitHub", - "config": { - "client_id": "PLACEHOLDER", - "client_secret": "PLACEHOLDER", - }, -} - -AUTH_OAUTH_AUTH0 = { - "type": "Auth0", - "config": { - "client_id": "PLACEHOLDER", - "client_secret": "PLACEHOLDER", - "auth0_subdomain": "PLACEHOLDER", - }, -} - -LOCAL = { - "node_selectors": { - "general": { - "key": "kubernetes.io/os", - "value": "linux", - }, - "user": { - "key": "kubernetes.io/os", - "value": "linux", - }, - "worker": { - "key": "kubernetes.io/os", - "value": "linux", - }, - } -} - -EXISTING = { - "node_selectors": { - "general": { - "key": "kubernetes.io/os", - "value": "linux", - }, - "user": { - "key": "kubernetes.io/os", - "value": "linux", - }, - "worker": { - "key": "kubernetes.io/os", - "value": "linux", - }, - } -} - -DIGITAL_OCEAN = { - "region": "nyc3", - "kubernetes_version": "PLACEHOLDER", - "node_groups": { - "general": {"instance": "g-8vcpu-32gb", "min_nodes": 1, "max_nodes": 1}, - "user": {"instance": "g-4vcpu-16gb", "min_nodes": 1, "max_nodes": 5}, - "worker": {"instance": "g-4vcpu-16gb", "min_nodes": 1, "max_nodes": 5}, - }, -} -# Digital Ocean image slugs are listed here https://slugs.do-api.dev/ - -GOOGLE_PLATFORM = { - "project": "PLACEHOLDER", - "region": "us-central1", - "kubernetes_version": "PLACEHOLDER", - "node_groups": { - "general": {"instance": "n1-standard-8", "min_nodes": 1, "max_nodes": 1}, - "user": {"instance": "n1-standard-4", "min_nodes": 0, "max_nodes": 5}, - "worker": {"instance": "n1-standard-4", "min_nodes": 0, "max_nodes": 5}, - }, -} - -AZURE = { - "region": "Central US", - "kubernetes_version": "PLACEHOLDER", - "node_groups": { - "general": { - "instance": "Standard_D8_v3", - "min_nodes": 1, - "max_nodes": 1, - }, - "user": {"instance": "Standard_D4_v3", "min_nodes": 0, "max_nodes": 5}, - "worker": { - "instance": "Standard_D4_v3", - "min_nodes": 0, - "max_nodes": 5, - }, - }, - "storage_account_postfix": "".join( - 
random.choices("abcdefghijklmnopqrstuvwxyz0123456789", k=8) - ), -} - -AMAZON_WEB_SERVICES = { - "region": "us-west-2", - "kubernetes_version": "PLACEHOLDER", - "node_groups": { - "general": {"instance": "m5.2xlarge", "min_nodes": 1, "max_nodes": 1}, - "user": { - "instance": "m5.xlarge", - "min_nodes": 1, - "max_nodes": 5, - "single_subnet": False, - }, - "worker": { - "instance": "m5.xlarge", - "min_nodes": 1, - "max_nodes": 5, - "single_subnet": False, - }, - }, -} - -DEFAULT_PROFILES = { - "jupyterlab": [ - { - "display_name": "Small Instance", - "description": "Stable environment with 2 cpu / 8 GB ram", - "default": True, - "kubespawner_override": { - "cpu_limit": 2, - "cpu_guarantee": 1.5, - "mem_limit": "8G", - "mem_guarantee": "5G", - }, - }, - { - "display_name": "Medium Instance", - "description": "Stable environment with 4 cpu / 16 GB ram", - "kubespawner_override": { - "cpu_limit": 4, - "cpu_guarantee": 3, - "mem_limit": "16G", - "mem_guarantee": "10G", - }, - }, - ], - "dask_worker": { - "Small Worker": { - "worker_cores_limit": 2, - "worker_cores": 1.5, - "worker_memory_limit": "8G", - "worker_memory": "5G", - "worker_threads": 2, - }, - "Medium Worker": { - "worker_cores_limit": 4, - "worker_cores": 3, - "worker_memory_limit": "16G", - "worker_memory": "10G", - "worker_threads": 4, - }, - }, -} - - def render_config( - project_name, - nebari_domain, - cloud_provider, - ci_provider, - repository, - auth_provider, - namespace=None, - repository_auto_provision=False, - auth_auto_provision=False, - terraform_state=None, - kubernetes_version=None, - disable_prompt=False, - ssl_cert_email=None, + project_name: str, + nebari_domain: str = None, + cloud_provider: ProviderEnum = ProviderEnum.local, + ci_provider: CiEnum = CiEnum.none, + repository: str = None, + auth_provider: AuthenticationEnum = AuthenticationEnum.password, + namespace: str = "dev", + repository_auto_provision: bool = False, + auth_auto_provision: bool = False, + terraform_state: TerraformStateEnum = TerraformStateEnum.remote, + kubernetes_version: str = None, + disable_prompt: bool = False, + ssl_cert_email: str = None, ): - config = base_configuration().copy() - config["provider"] = cloud_provider - - if ci_provider is not None and ci_provider != "none": - config["ci_cd"] = CICD_CONFIGURATION.copy() - config["ci_cd"]["type"] = ci_provider - - if terraform_state is not None: - config["terraform_state"] = {"type": terraform_state} + config = { + "provider": cloud_provider.value, + "namespace": namespace, + "nebari_version": __version__, + } if project_name is None and not disable_prompt: project_name = input("Provide project name: ") config["project_name"] = project_name - if not re.match(namestr_regex, project_name): - raise ValueError( - "project name should contain only letters and hyphens/underscores (but not at the start or end)" - ) - - if namespace is not None: - config["namespace"] = namespace - - if not re.match(namestr_regex, namespace): - raise ValueError( - "namespace should contain only letters and hyphens/underscores (but not at the start or end)" - ) + if nebari_domain is not None: + config["domain"] = nebari_domain - if nebari_domain is None and not disable_prompt: - nebari_domain = input("Provide domain: ") - config["domain"] = nebari_domain - - # In nebari_version only use major.minor.patch version - drop any pre/post/dev suffixes - config["nebari_version"] = __version__ - - # Generate default password for Keycloak root user and also example-user if using password auth - default_password = 
"".join( - secrets.choice(string.ascii_letters + string.digits) for i in range(16) - ) + config["ci_cd"] = {"type": ci_provider.value} + config["terraform_state"] = {"type": terraform_state.value} # Save default password to file default_password_filename = Path(tempfile.gettempdir()) / "NEBARI_DEFAULT_PASSWORD" - with open(default_password_filename, "w") as f: - f.write(default_password) + config["security"] = { + "keycloak": {"initial_root_password": random_secure_string(length=32)} + } + with default_password_filename.open("w") as f: + f.write(config["security"]["keycloak"]["initial_root_password"]) default_password_filename.chmod(0o700) - config["theme"]["jupyterhub"]["hub_title"] = f"Nebari - { project_name }" + config["theme"] = {"jupyterhub": {"hub_title": f"Nebari - { project_name }"}} config["theme"]["jupyterhub"][ "welcome" ] = """Welcome! Learn about Nebari's features and configurations in the documentation. If you have any questions or feedback, reach the team on Nebari's support forums.""" - if auth_provider == "github": - config["security"]["authentication"] = AUTH_OAUTH_GITHUB.copy() + config["security"]["authentication"] = {"type": auth_provider.value} + if auth_provider == AuthenticationEnum.github: if not disable_prompt: - config["security"]["authentication"]["config"]["client_id"] = input( - "Github client_id: " - ) - config["security"]["authentication"]["config"]["client_secret"] = input( - "Github client_secret: " - ) - elif auth_provider == "auth0": - config["security"]["authentication"] = AUTH_OAUTH_AUTH0.copy() - - elif auth_provider == "password": - config["security"]["authentication"] = AUTH_PASSWORD.copy() - - # Always use default password for keycloak root - config["security"].setdefault("keycloak", {})[ - "initial_root_password" - ] = default_password + config["security"]["authentication"]["config"] = { + "client_id": input("Github client_id: "), + "client_secret": input("Github client_secret: "), + } + elif auth_provider == AuthenticationEnum.auth0: + if auth_auto_provision: + auth0_config = create_client(config.domain, config.project_name) + config["security"]["authentication"]["config"] = auth0_config + else: + config["security"]["authentication"]["config"] = { + "client_id": input("Auth0 client_id: "), + "client_secret": input("Auth0 client_secret: "), + "auth0_subdomain": input("Auth0 subdomain: "), + } - if cloud_provider == "do": + if cloud_provider == ProviderEnum.do: config["theme"]["jupyterhub"][ "hub_subtitle" ] = f"{WELCOME_HEADER_TEXT} on Digital Ocean" - config["digital_ocean"] = DIGITAL_OCEAN.copy() - set_kubernetes_version(config, kubernetes_version, cloud_provider) - - elif cloud_provider == "gcp": + if kubernetes_version is not None: + config["digital_ocean"] = {"kubernetes_version": kubernetes_version} + elif cloud_provider == ProviderEnum.gcp: config["theme"]["jupyterhub"][ "hub_subtitle" ] = f"{WELCOME_HEADER_TEXT} on Google Cloud Platform" - config["google_cloud_platform"] = GOOGLE_PLATFORM.copy() - set_kubernetes_version(config, kubernetes_version, cloud_provider) - + config["google_cloud_platform"] = {} if "PROJECT_ID" in os.environ: config["google_cloud_platform"]["project"] = os.environ["PROJECT_ID"] elif not disable_prompt: @@ -427,50 +104,43 @@ def render_config( "Enter Google Cloud Platform Project ID: " ) - elif cloud_provider == "azure": + if kubernetes_version is not None: + config["google_cloud_platform"]["kubernetes_version"] = kubernetes_version + elif cloud_provider == ProviderEnum.azure: config["theme"]["jupyterhub"][ 
"hub_subtitle" ] = f"{WELCOME_HEADER_TEXT} on Azure" - config["azure"] = AZURE.copy() - set_kubernetes_version(config, kubernetes_version, cloud_provider) - - elif cloud_provider == "aws": + if kubernetes_version is not None: + config["azure"] = {"kubernetes_version": kubernetes_version} + elif cloud_provider == ProviderEnum.aws: config["theme"]["jupyterhub"][ "hub_subtitle" ] = f"{WELCOME_HEADER_TEXT} on Amazon Web Services" - config["amazon_web_services"] = AMAZON_WEB_SERVICES.copy() - set_kubernetes_version(config, kubernetes_version, cloud_provider) - if "AWS_DEFAULT_REGION" in os.environ: - config["amazon_web_services"]["region"] = os.environ["AWS_DEFAULT_REGION"] - - elif cloud_provider == "existing": + if kubernetes_version is not None: + config["amazon_web_services"] = {"kubernetes_version": kubernetes_version} + elif cloud_provider == ProviderEnum.existing: config["theme"]["jupyterhub"]["hub_subtitle"] = WELCOME_HEADER_TEXT - config["existing"] = EXISTING.copy() - - elif cloud_provider == "local": + elif cloud_provider == ProviderEnum.local: config["theme"]["jupyterhub"]["hub_subtitle"] = WELCOME_HEADER_TEXT - config["local"] = LOCAL.copy() - config["profiles"] = DEFAULT_PROFILES.copy() - config["environments"] = default_environments().copy() + if ssl_cert_email: + config["certificate"] = {"type": CertificateEnum.letsencrypt.value} + config["certificate"]["acme_email"] = ssl_cert_email - if ssl_cert_email is not None: - config["certificate"] = { - "type": "lets-encrypt", - "acme_email": ssl_cert_email, - "acme_server": "https://acme-v02.api.letsencrypt.org/directory", - } + # validate configuration and convert to model + from nebari.plugins import nebari_plugin_manager - if auth_auto_provision: - if auth_provider == "auth0": - auth0_auto_provision(config) + try: + config_model = nebari_plugin_manager.config_schema.parse_obj(config) + except pydantic.ValidationError as e: + print(str(e)) if repository_auto_provision: GITHUB_REGEX = "(https://)?github.com/([^/]+)/([^/]+)/?" 
if re.search(GITHUB_REGEX, repository): match = re.search(GITHUB_REGEX, repository) git_repository = github_auto_provision( - config, match.group(2), match.group(3) + config_model, match.group(2), match.group(3) ) git_repository_initialize(git_repository) else: @@ -481,11 +151,7 @@ def render_config( return config -def github_auto_provision(config, owner, repo): - check_cloud_credentials( - config - ) # We may need env vars such as AWS_ACCESS_KEY_ID depending on provider - +def github_auto_provision(config: pydantic.BaseModel, owner: str, repo: str): already_exists = True try: github.get_repository(owner, repo) @@ -498,8 +164,8 @@ def github_auto_provision(config, owner, repo): github.create_repository( owner, repo, - description=f'Nebari {config["project_name"]}-{config["provider"]}', - homepage=f'https://{config["domain"]}', + description=f"Nebari {config.project_name}-{config.provider}", + homepage=f"https://{config.domain}", ) except requests.exceptions.HTTPError as he: raise ValueError( @@ -510,7 +176,7 @@ def github_auto_provision(config, owner, repo): try: # Secrets - if config["provider"] == "do": + if config.provider == ProviderEnum.do: for name in { "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", @@ -519,17 +185,17 @@ def github_auto_provision(config, owner, repo): "DIGITALOCEAN_TOKEN", }: github.update_secret(owner, repo, name, os.environ[name]) - elif config["provider"] == "aws": + elif config.provider == ProviderEnum.aws: for name in { "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", }: github.update_secret(owner, repo, name, os.environ[name]) - elif config["provider"] == "gcp": + elif config.provider == ProviderEnum.gcp: github.update_secret(owner, repo, "PROJECT_ID", os.environ["PROJECT_ID"]) with open(os.environ["GOOGLE_CREDENTIALS"]) as f: github.update_secret(owner, repo, "GOOGLE_CREDENTIALS", f.read()) - elif config["provider"] == "azure": + elif config.provider == ProviderEnum.azure: for name in { "ARM_CLIENT_ID", "ARM_CLIENT_SECRET", @@ -552,16 +218,3 @@ def git_repository_initialize(git_repository): if not git.is_git_repo(Path.cwd()): git.initialize_git(Path.cwd()) git.add_git_remote(git_repository, path=Path.cwd(), remote_name="origin") - - -def auth0_auto_provision(config): - auth0_config = create_client(config["domain"], config["project_name"]) - config["security"]["authentication"]["config"]["client_id"] = auth0_config[ - "client_id" - ] - config["security"]["authentication"]["config"]["client_secret"] = auth0_config[ - "client_secret" - ] - config["security"]["authentication"]["config"]["auth0_subdomain"] = auth0_config[ - "auth0_subdomain" - ] diff --git a/src/_nebari/keycloak.py b/src/_nebari/keycloak.py index 4579298c7..674b7c8ca 100644 --- a/src/_nebari/keycloak.py +++ b/src/_nebari/keycloak.py @@ -7,16 +7,13 @@ import requests import rich -from .schema import verify -from .utils import load_yaml +from _nebari.stages.kubernetes_ingress import CertificateEnum +from nebari import schema logger = logging.getLogger(__name__) -def do_keycloak(config_filename, *args): - config = load_yaml(config_filename) - verify(config) - +def do_keycloak(config: schema.Main, *args): # suppress insecure warnings import urllib3 @@ -32,7 +29,7 @@ def do_keycloak(config_filename, *args): username = args[1] password = args[2] if len(args) >= 3 else None - create_user(keycloak_admin, username, password, domain=config["domain"]) + create_user(keycloak_admin, username, password, domain=config.domain) elif args[0] == "listusers": list_users(keycloak_admin) else: @@ -84,18 +81,17 @@ def 
list_users(keycloak_admin: keycloak.KeycloakAdmin): ) -def get_keycloak_admin_from_config(config): +def get_keycloak_admin_from_config(config: schema.Main): keycloak_server_url = os.environ.get( - "KEYCLOAK_SERVER_URL", f"https://{config['domain']}/auth/" + "KEYCLOAK_SERVER_URL", f"https://{config.domain}/auth/" ) keycloak_username = os.environ.get("KEYCLOAK_ADMIN_USERNAME", "root") keycloak_password = os.environ.get( - "KEYCLOAK_ADMIN_PASSWORD", - config.get("security", {}).get("keycloak", {}).get("initial_root_password", ""), + "KEYCLOAK_ADMIN_PASSWORD", config.security.keycloak.initial_root_password ) - should_verify_tls = config.get("certificate", {}).get("type", "") != "self-signed" + should_verify_tls = config.certificate.type != CertificateEnum.selfsigned try: keycloak_admin = keycloak.KeycloakAdmin( @@ -116,17 +112,14 @@ def get_keycloak_admin_from_config(config): return keycloak_admin -def keycloak_rest_api_call(config=None, request: str = None): +def keycloak_rest_api_call(config: schema.Main = None, request: str = None): """Communicate directly with the Keycloak REST API by passing it a request""" - - config = load_yaml(config) - - keycloak_server_url = f"https://{config['domain']}/auth/" + keycloak_server_url = f"https://{config.domain}/auth/" keycloak_admin_username = os.environ.get("KEYCLOAK_ADMIN_USERNAME", "root") keycloak_admin_password = os.environ.get( "KEYCLOAK_ADMIN_PASSWORD", - config.get("security", {}).get("keycloak", {}).get("initial_root_password", ""), + config.security.keycloak.initial_root_password, ) try: @@ -184,7 +177,7 @@ def keycloak_rest_api_call(config=None, request: str = None): raise e -def export_keycloak_users(config, realm): +def export_keycloak_users(config: schema.Main, realm: str): request = f"GET /{realm}/users" users = keycloak_rest_api_call(config, request=request) diff --git a/src/_nebari/provider/cicd/github.py b/src/_nebari/provider/cicd/github.py index 84ab389b5..b02c0bf32 100644 --- a/src/_nebari/provider/cicd/github.py +++ b/src/_nebari/provider/cicd/github.py @@ -8,6 +8,7 @@ from _nebari.constants import LATEST_SUPPORTED_PYTHON_VERSION from _nebari.provider.cicd.common import pip_install_nebari +from nebari import schema GITHUB_BASE_URL = "https://api.github.com/" @@ -96,7 +97,7 @@ def create_repository(owner, repo, description, homepage, private=True): return f"git@github.com:{owner}/{repo}.git" -def gha_env_vars(config): +def gha_env_vars(config: schema.Main): env_vars = { "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}", } @@ -105,29 +106,29 @@ def gha_env_vars(config): env_vars["NEBARI_GH_BRANCH"] = "${{ secrets.NEBARI_GH_BRANCH }}" # This assumes that the user is using the omitting sensitive values configuration for the token. 
- if config.get("prefect", {}).get("enabled", False): + if config.prefect.enabled: env_vars[ "NEBARI_SECRET_prefect_token" ] = "${{ secrets.NEBARI_SECRET_PREFECT_TOKEN }}" - if config["provider"] == "aws": + if config.provider == schema.ProviderEnum.aws: env_vars["AWS_ACCESS_KEY_ID"] = "${{ secrets.AWS_ACCESS_KEY_ID }}" env_vars["AWS_SECRET_ACCESS_KEY"] = "${{ secrets.AWS_SECRET_ACCESS_KEY }}" env_vars["AWS_DEFAULT_REGION"] = "${{ secrets.AWS_DEFAULT_REGION }}" - elif config["provider"] == "azure": + elif config.provider == schema.ProviderEnum.azure: env_vars["ARM_CLIENT_ID"] = "${{ secrets.ARM_CLIENT_ID }}" env_vars["ARM_CLIENT_SECRET"] = "${{ secrets.ARM_CLIENT_SECRET }}" env_vars["ARM_SUBSCRIPTION_ID"] = "${{ secrets.ARM_SUBSCRIPTION_ID }}" env_vars["ARM_TENANT_ID"] = "${{ secrets.ARM_TENANT_ID }}" - elif config["provider"] == "do": + elif config.provider == schema.ProviderEnum.do: env_vars["AWS_ACCESS_KEY_ID"] = "${{ secrets.AWS_ACCESS_KEY_ID }}" env_vars["AWS_SECRET_ACCESS_KEY"] = "${{ secrets.AWS_SECRET_ACCESS_KEY }}" env_vars["SPACES_ACCESS_KEY_ID"] = "${{ secrets.SPACES_ACCESS_KEY_ID }}" env_vars["SPACES_SECRET_ACCESS_KEY"] = "${{ secrets.SPACES_SECRET_ACCESS_KEY }}" env_vars["DIGITALOCEAN_TOKEN"] = "${{ secrets.DIGITALOCEAN_TOKEN }}" - elif config["provider"] == "gcp": + elif config.provider == schema.ProviderEnum.gcp: env_vars["GOOGLE_CREDENTIALS"] = "${{ secrets.GOOGLE_CREDENTIALS }}" - elif config["provider"] in ["local", "existing"]: + elif config.provider in [schema.ProviderEnum.local, schema.ProviderEnum.existing]: # create mechanism to allow for extra env vars? pass else: @@ -231,19 +232,16 @@ def install_nebari_step(nebari_version): def gen_nebari_ops(config): env_vars = gha_env_vars(config) - branch = config["ci_cd"]["branch"] - commit_render = config["ci_cd"].get("commit_render", True) - nebari_version = config["nebari_version"] - push = GHA_on_extras(branches=[branch], paths=["nebari-config.yaml"]) + push = GHA_on_extras(branches=[config.ci_cd.branch], paths=["nebari-config.yaml"]) on = GHA_on(__root__={"push": push}) step1 = checkout_image_step() step2 = setup_python_step() - step3 = install_nebari_step(nebari_version) + step3 = install_nebari_step(config.nebari_version) gha_steps = [step1, step2, step3] - for step in config["ci_cd"].get("before_script", []): + for step in config.ci_cd.before_script: gha_steps.append(GHA_job_step(**step)) step4 = GHA_job_step( @@ -259,7 +257,7 @@ def gen_nebari_ops(config): "git config user.name 'github action' ; " "git add ./.gitignore ./.github ./stages; " "git diff --quiet && git diff --staged --quiet || (git commit -m '${{ env.COMMIT_MSG }}') ; " - f"git push origin {branch}" + f"git push origin {config.ci_cd.branch}" ), env={ "COMMIT_MSG": GHA_job_steps_extras( @@ -267,10 +265,10 @@ def gen_nebari_ops(config): ) }, ) - if commit_render: + if config.ci_cd.commit_render: gha_steps.append(step5) - for step in config["ci_cd"].get("after_script", []): + for step in config.ci_cd.after_script: gha_steps.append(GHA_job_step(**step)) job1 = GHA_job_id( @@ -300,15 +298,14 @@ def gen_nebari_linter(config): else: env_vars = None - branch = config["ci_cd"]["branch"] - nebari_version = config["nebari_version"] - - pull_request = GHA_on_extras(branches=[branch], paths=["nebari-config.yaml"]) + pull_request = GHA_on_extras( + branches=[config.ci_cd.branch], paths=["nebari-config.yaml"] + ) on = GHA_on(__root__={"pull_request": pull_request}) step1 = checkout_image_step() step2 = setup_python_step() - step3 = install_nebari_step(nebari_version) + 
step3 = install_nebari_step(config.nebari_version) step4_envs = { "PR_NUMBER": GHA_job_steps_extras(__root__="${{ github.event.number }}"), diff --git a/src/_nebari/provider/cicd/gitlab.py b/src/_nebari/provider/cicd/gitlab.py index d5b6726ba..e2d02b388 100644 --- a/src/_nebari/provider/cicd/gitlab.py +++ b/src/_nebari/provider/cicd/gitlab.py @@ -38,19 +38,13 @@ class GLCI(BaseModel): def gen_gitlab_ci(config): - branch = config["ci_cd"]["branch"] - commit_render = config["ci_cd"].get("commit_render", True) - before_script = config["ci_cd"].get("before_script") - after_script = config["ci_cd"].get("after_script") - pip_install = pip_install_nebari(config["nebari_version"]) - render_vars = { "COMMIT_MSG": "nebari-config.yaml automated commit: {{ '$CI_COMMIT_SHA' }}", } script = [ - f"git checkout {branch}", - f"{pip_install}", + f"git checkout {config.ci_cd.branch}", + pip_install_nebari(config.nebari_version), "nebari deploy --config nebari-config.yaml --disable-prompt --skip-remote-state-provision", ] @@ -59,15 +53,15 @@ def gen_gitlab_ci(config): "git config user.name 'gitlab ci'", "git add .", "git diff --quiet && git diff --staged --quiet || (git commit -m '${COMMIT_MSG}'", - f"git push origin {branch})", + f"git push origin {config.ci_cd.branch})", ] - if commit_render: + if config.ci_cd.commit_render: script += commit_render_script rules = [ GLCI_rules( - if_=f"$CI_COMMIT_BRANCH == '{branch}'", + if_=f"$CI_COMMIT_BRANCH == '{config.ci_cd.branch}'", changes=["nebari-config.yaml"], ) ] @@ -75,8 +69,8 @@ def gen_gitlab_ci(config): render_nebari = GLCI_job( image=f"python:{LATEST_SUPPORTED_PYTHON_VERSION}", variables=render_vars, - before_script=before_script, - after_script=after_script, + before_script=config.ci_cd.before_script, + after_script=config.ci_cd.after_script, script=script, rules=rules, ) diff --git a/src/_nebari/provider/cloud/amazon_web_services.py b/src/_nebari/provider/cloud/amazon_web_services.py index 218aebecf..943f9d3e8 100644 --- a/src/_nebari/provider/cloud/amazon_web_services.py +++ b/src/_nebari/provider/cloud/amazon_web_services.py @@ -5,9 +5,23 @@ import boto3 +from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version +def check_credentials(): + for variable in { + "AWS_DEFAULT_REGION", + "AWS_ACCESS_KEY_ID", + "AWS_SECRET_ACCESS_KEY", + }: + if variable not in os.environ: + raise ValueError( + f"""Missing the following required environment variable: {variable}\n + Please see the documentation for more information: {constants.AWS_ENV_DOCS}""" + ) + + @functools.lru_cache() def regions(): output = subprocess.check_output(["aws", "ec2", "describe-regions"]) @@ -16,7 +30,7 @@ def regions(): @functools.lru_cache() -def zones(region): +def zones(region: str = "us-west-2"): output = subprocess.check_output( ["aws", "ec2", "describe-availability-zones", "--region", region] ) @@ -30,6 +44,7 @@ def kubernetes_versions(region="us-west-2"): # AWS SDK (boto3) currently doesn't offer an intuitive way to list available kubernetes version. This implementation grabs kubernetes versions for specific EKS addons. It will therefore always be (at the very least) a subset of all kubernetes versions still supported by AWS. 
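# Illustrative sketch (not taken from the patch): as the comment above notes, EKS exposes no
# direct "list supported kubernetes versions" call, so supported cluster versions are read off
# the addon compatibility matrix returned by `describe_addon_versions`. Roughly, the traversal
# looks like this (function name is hypothetical; assumes AWS credentials and a default region
# are already configured):
import boto3

def eks_cluster_versions_from_addons() -> set:
    client = boto3.client("eks")
    versions = set()
    for addon in client.describe_addon_versions()["addons"]:
        for addon_version in addon["addonVersions"]:
            for compatibility in addon_version["compatibilities"]:
                # each compatibility entry names a cluster version the addon supports
                versions.add(compatibility["clusterVersion"])
    return versions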
if not os.getenv("AWS_DEFAULT_REGION"): os.environ["AWS_DEFAULT_REGION"] = region + client = boto3.client("eks") supported_kubernetes_versions = list() available_addons = client.describe_addon_versions() @@ -45,7 +60,7 @@ def kubernetes_versions(region="us-west-2"): @functools.lru_cache() -def instances(region): +def instances(region: str = "us-west-2"): output = subprocess.check_output( ["aws", "ec2", "describe-instance-types", "--region", region] ) diff --git a/src/_nebari/provider/cloud/azure_cloud.py b/src/_nebari/provider/cloud/azure_cloud.py index 7ef13da22..170a301b8 100644 --- a/src/_nebari/provider/cloud/azure_cloud.py +++ b/src/_nebari/provider/cloud/azure_cloud.py @@ -5,12 +5,27 @@ from azure.identity import DefaultAzureCredential from azure.mgmt.containerservice import ContainerServiceClient +from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version logger = logging.getLogger("azure") logger.setLevel(logging.ERROR) +def check_credentials(): + for variable in { + "ARM_CLIENT_ID", + "ARM_CLIENT_SECRET", + "ARM_SUBSCRIPTION_ID", + "ARM_TENANT_ID", + }: + if variable not in os.environ: + raise ValueError( + f"""Missing the following required environment variable: {variable}\n + Please see the documentation for more information: {constants.AZURE_ENV_DOCS}""" + ) + + @functools.lru_cache() def initiate_container_service_client(): subscription_id = os.environ.get("ARM_SUBSCRIPTION_ID", None) diff --git a/src/_nebari/provider/cloud/commons.py b/src/_nebari/provider/cloud/commons.py index ed1ed89b7..a12dbec8b 100644 --- a/src/_nebari/provider/cloud/commons.py +++ b/src/_nebari/provider/cloud/commons.py @@ -1,9 +1,14 @@ +import re + from _nebari.constants import HIGHEST_SUPPORTED_K8S_VERSION def filter_by_highest_supported_k8s_version(k8s_versions_list): filtered_k8s_versions_list = [] for k8s_version in k8s_versions_list: - if k8s_version.split("-")[0] <= HIGHEST_SUPPORTED_K8S_VERSION: + version = tuple( + filter(None, re.search("(\d+)\.(\d+)(?:\.(\d+))?", k8s_version).groups()) + ) + if version <= HIGHEST_SUPPORTED_K8S_VERSION: filtered_k8s_versions_list.append(k8s_version) return filtered_k8s_versions_list diff --git a/src/_nebari/provider/cloud/digital_ocean.py b/src/_nebari/provider/cloud/digital_ocean.py index fd55672f1..7998bb1af 100644 --- a/src/_nebari/provider/cloud/digital_ocean.py +++ b/src/_nebari/provider/cloud/digital_ocean.py @@ -1,11 +1,26 @@ import functools import os +import typing import requests +from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version +def check_credentials(): + for variable in { + "SPACES_ACCESS_KEY_ID", + "SPACES_SECRET_ACCESS_KEY", + "DIGITALOCEAN_TOKEN", + }: + if variable not in os.environ: + raise ValueError( + f"""Missing the following required environment variable: {variable}\n + Please see the documentation for more information: {constants.DO_ENV_DOCS}""" + ) + + def digital_ocean_request(url, method="GET", json=None): BASE_DIGITALOCEAN_URL = "https://api.digitalocean.com/v2/" @@ -41,7 +56,7 @@ def regions(): return _kubernetes_options()["options"]["regions"] -def kubernetes_versions(region): +def kubernetes_versions(region) -> typing.List[str]: """Return list of available kubernetes supported by cloud provider. 
Sorted from oldest to latest.""" supported_kubernetes_versions = sorted( [_["slug"].split("-")[0] for _ in _kubernetes_options()["options"]["versions"]] diff --git a/src/_nebari/provider/cloud/google_cloud.py b/src/_nebari/provider/cloud/google_cloud.py index a8e6d542f..810011ff5 100644 --- a/src/_nebari/provider/cloud/google_cloud.py +++ b/src/_nebari/provider/cloud/google_cloud.py @@ -1,10 +1,21 @@ import functools import json +import os import subprocess +from _nebari import constants from _nebari.provider.cloud.commons import filter_by_highest_supported_k8s_version +def check_credentials(): + for variable in {"GOOGLE_CREDENTIALS"}: + if variable not in os.environ: + raise ValueError( + f"""Missing the following required environment variable: {variable}\n + Please see the documentation for more information: {constants.GCP_ENV_DOCS}""" + ) + + @functools.lru_cache() def projects(): output = subprocess.check_output(["gcloud", "projects", "list", "--format=json"]) diff --git a/src/_nebari/provider/oauth/auth0.py b/src/_nebari/provider/oauth/auth0.py index 525811052..dd714ec48 100644 --- a/src/_nebari/provider/oauth/auth0.py +++ b/src/_nebari/provider/oauth/auth0.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -def create_client(jupyterhub_endpoint, project_name, reuse_existing=True): +def create_client(jupyterhub_endpoint: str, project_name: str, reuse_existing=True): for variable in {"AUTH0_DOMAIN", "AUTH0_CLIENT_ID", "AUTH0_CLIENT_SECRET"}: if variable not in os.environ: raise ValueError(f"Required environment variable={variable} not defined") diff --git a/src/_nebari/render.py b/src/_nebari/render.py index c25aa9c2e..8679a80cd 100644 --- a/src/_nebari/render.py +++ b/src/_nebari/render.py @@ -1,35 +1,27 @@ import functools import hashlib -import json import os +import pathlib import shutil import sys -from pathlib import Path from typing import Dict, List -import yaml from rich import print from rich.table import Table -from ruamel.yaml import YAML -import _nebari from _nebari.deprecate import DEPRECATED_FILE_PATHS -from _nebari.provider.cicd.github import gen_nebari_linter, gen_nebari_ops -from _nebari.provider.cicd.gitlab import gen_gitlab_ci -from _nebari.stages import tf_objects from _nebari.utils import is_relative_to +from nebari import hookspecs, schema -def render_template(output_directory, config_filename, dry_run=False): - # get directory for nebari templates - template_directory = Path(_nebari.__file__).parent / "template" - - # would be nice to remove assumption that input directory - # is in local filesystem and a directory - if not template_directory.is_dir(): - raise ValueError(f"template directory={template_directory} is not a directory") - - if output_directory == Path.home(): +def render_template( + output_directory: pathlib.Path, + config: schema.Main, + stages: List[hookspecs.NebariStage], + dry_run=False, +): + output_directory = pathlib.Path(output_directory).resolve() + if output_directory == pathlib.Path.home(): print("ERROR: Deploying Nebari in home directory is not advised!") sys.exit(1) @@ -37,46 +29,13 @@ def render_template(output_directory, config_filename, dry_run=False): # into it in remove_existing_renders output_directory.mkdir(exist_ok=True, parents=True) - if not config_filename.is_file(): - raise ValueError( - f"cookiecutter configuration={config_filename} is not filename" + contents = {} + for stage in stages: + contents.update( + stage(output_directory=output_directory, config=config).render() ) - with open(config_filename) as f: - yaml = 
YAML(typ="safe", pure=True) - config = yaml.load(f) - - # For any config values that start with - # NEBARI_SECRET_, set the values using the - # corresponding env var. - set_env_vars_in_config(config) - - config["repo_directory"] = output_directory.name - config["nebari_config_yaml_path"] = str(config_filename.absolute()) - - contents = render_contents(config) - - directories = [ - f"stages/02-infrastructure/{config['provider']}", - "stages/03-kubernetes-initialize", - "stages/04-kubernetes-ingress", - "stages/05-kubernetes-keycloak", - "stages/06-kubernetes-keycloak-configuration", - "stages/07-kubernetes-services", - "stages/08-nebari-tf-extensions", - ] - if ( - config["provider"] not in {"existing", "local"} - and config["terraform_state"]["type"] == "remote" - ): - directories.append(f"stages/01-terraform-state/{config['provider']}") - - source_dirs = [template_directory / Path(directory) for directory in directories] - output_dirs = [output_directory / Path(directory) for directory in directories] new, untracked, updated, deleted = inspect_files( - source_dirs, - output_dirs, - source_base_dir=template_directory, output_base_dir=output_directory, ignore_filenames=[ "terraform.tfstate", @@ -119,15 +78,15 @@ def render_template(output_directory, config_filename, dry_run=False): print("dry-run enabled no files will be created, updated, or deleted") else: for filename in new | updated: - input_filename = template_directory / filename output_filename = output_directory / filename output_filename.parent.mkdir(parents=True, exist_ok=True) - if input_filename.exists(): - shutil.copy(input_filename, output_filename) - else: + if isinstance(contents[filename], str): with open(output_filename, "w") as f: f.write(contents[filename]) + else: + with open(output_filename, "wb") as f: + f.write(contents[filename]) for path in deleted: abs_path = (output_directory / path).resolve() @@ -143,102 +102,17 @@ def render_template(output_directory, config_filename, dry_run=False): shutil.rmtree(abs_path) -def render_contents(config: Dict): - """Dynamically generated contents from _nebari configuration.""" - contents = { - **tf_objects.stage_01_terraform_state(config), - **tf_objects.stage_02_infrastructure(config), - **tf_objects.stage_03_kubernetes_initialize(config), - **tf_objects.stage_04_kubernetes_ingress(config), - **tf_objects.stage_05_kubernetes_keycloak(config), - **tf_objects.stage_06_kubernetes_keycloak_configuration(config), - **tf_objects.stage_07_kubernetes_services(config), - **tf_objects.stage_08_nebari_tf_extensions(config), - } - - if config.get("ci_cd"): - for fn, workflow in gen_cicd(config).items(): - workflow_json = workflow.json( - indent=2, - by_alias=True, - exclude_unset=True, - exclude_defaults=True, - ) - workflow_yaml = yaml.dump( - json.loads(workflow_json), sort_keys=False, indent=2 - ) - contents.update({fn: workflow_yaml}) - - contents.update(gen_gitignore()) - - return contents - - -def gen_gitignore(): - """ - Generate `.gitignore` file. - Add files as needed. - """ - from inspect import cleandoc - - files_to_ignore = """ - # ignore terraform state - .terraform - terraform.tfstate - terraform.tfstate.backup - .terraform.tfstate.lock.info - - # python - __pycache__ - """ - return {Path(".gitignore"): cleandoc(files_to_ignore)} - - -def gen_cicd(config): - """ - Use cicd schema to generate workflow files based on the - `ci_cd` key in the `config`. 
- - For more detail on schema: - GiHub-Actions - nebari/providers/cicd/github.py - GitLab-CI - nebari/providers/cicd/gitlab.py - """ - cicd_files = {} - cicd_provider = config["ci_cd"]["type"] - - if cicd_provider == "github-actions": - gha_dir = Path(".github") / "workflows" - cicd_files[gha_dir / "nebari-ops.yaml"] = gen_nebari_ops(config) - cicd_files[gha_dir / "nebari-linter.yaml"] = gen_nebari_linter(config) - - elif cicd_provider == "gitlab-ci": - cicd_files[Path(".gitlab-ci.yml")] = gen_gitlab_ci(config) - - else: - raise ValueError( - f"The ci_cd provider, {cicd_provider}, is not supported. Supported providers include: `github-actions`, `gitlab-ci`." - ) - - return cicd_files - - def inspect_files( - source_dirs: Path, - output_dirs: Path, - source_base_dir: Path, - output_base_dir: Path, + output_base_dir: pathlib.Path, ignore_filenames: List[str] = None, ignore_directories: List[str] = None, - deleted_paths: List[Path] = None, + deleted_paths: List[pathlib.Path] = None, contents: Dict[str, str] = None, ): """Return created, updated and untracked files by computing a checksum over the provided directory. Args: - source_dirs (Path): The source dir used as base for comparison - output_dirs (Path): The destination dir which will be matched with - source_base_dir (Path): Relative base path to source directory - output_base_dir (Path): Relative base path to output directory + output_base_dir (str): Relative base path to output directory ignore_filenames (list[str]): Filenames to ignore while comparing for changes ignore_directories (list[str]): Directories to ignore while comparing for changes deleted_paths (list[Path]): Paths that if exist in output directory should be deleted @@ -252,25 +126,24 @@ def inspect_files( output_files = {} def list_files( - directory: Path, ignore_filenames: List[str], ignore_directories: List[str] + directory: pathlib.Path, + ignore_filenames: List[str], + ignore_directories: List[str], ): for path in directory.rglob("*"): if not path.is_file(): continue - - if path.name in ignore_filenames: - continue - - if any( - d in ignore_directories for d in path.relative_to(directory).parts[:-1] - ): - continue - yield path - for filename, content in contents.items(): - source_files[filename] = hashlib.sha256(content.encode("utf8")).hexdigest() - output_filename = output_base_dir / filename + for filename in contents: + if isinstance(contents[filename], str): + source_files[filename] = hashlib.sha256( + contents[filename].encode("utf8") + ).hexdigest() + else: + source_files[filename] = hashlib.sha256(contents[filename]).hexdigest() + + output_filename = pathlib.Path(output_base_dir) / filename if output_filename.is_file(): output_files[filename] = hash_file(filename) @@ -280,13 +153,11 @@ def list_files( if absolute_path.exists(): deleted_files.add(path) - for source_dir, output_dir in zip(source_dirs, output_dirs): - for filename in list_files(source_dir, ignore_filenames, ignore_directories): - relative_path = filename.relative_to(source_base_dir) - source_files[relative_path] = hash_file(filename) - - for filename in list_files(output_dir, ignore_filenames, ignore_directories): - relative_path = filename.relative_to(output_base_dir) + for filename in list_files(output_base_dir, ignore_filenames, ignore_directories): + relative_path = pathlib.Path.relative_to( + pathlib.Path(filename), output_base_dir + ) + if filename.is_file(): output_files[relative_path] = hash_file(filename) new_files = source_files.keys() - output_files.keys() diff --git 
a/src/_nebari/schema.py b/src/_nebari/schema.py deleted file mode 100644 index 6966e3b5b..000000000 --- a/src/_nebari/schema.py +++ /dev/null @@ -1,653 +0,0 @@ -import enum -import typing -from abc import ABC - -import pydantic -from pydantic import root_validator, validator - -from _nebari.utils import namestr_regex - -from .version import __version__, rounded_ver_parse - - -class CertificateEnum(str, enum.Enum): - letsencrypt = "lets-encrypt" - selfsigned = "self-signed" - existing = "existing" - disabled = "disabled" - - -class TerraformStateEnum(str, enum.Enum): - remote = "remote" - local = "local" - existing = "existing" - - -class ProviderEnum(str, enum.Enum): - local = "local" - existing = "existing" - do = "do" - aws = "aws" - gcp = "gcp" - azure = "azure" - - -class GitRepoEnum(str, enum.Enum): - github = "github.com" - gitlab = "gitlab.com" - - -class CiEnum(str, enum.Enum): - github_actions = "github-actions" - gitlab_ci = "gitlab-ci" - none = "none" - - -class AuthenticationEnum(str, enum.Enum): - password = "password" - github = "GitHub" - auth0 = "Auth0" - custom = "custom" - - -class AccessEnum(str, enum.Enum): - all = "all" - yaml = "yaml" - keycloak = "keycloak" - - -class Base(pydantic.BaseModel): - ... - - class Config: - extra = "forbid" - - -# ============== CI/CD ============= - - -class CICD(Base): - type: CiEnum - branch: str - commit_render: typing.Optional[bool] = True - before_script: typing.Optional[typing.List[typing.Union[str, typing.Dict]]] - after_script: typing.Optional[typing.List[typing.Union[str, typing.Dict]]] - - -# ======== Generic Helm Extensions ======== -class HelmExtension(Base): - name: str - repository: str - chart: str - version: str - overrides: typing.Optional[typing.Dict] - - -# ============== Argo-Workflows ========= - - -class NebariWorkflowController(Base): - enabled: bool - image_tag: typing.Optional[str] - - -class ArgoWorkflows(Base): - enabled: bool - overrides: typing.Optional[typing.Dict] - nebari_workflow_controller: typing.Optional[NebariWorkflowController] - - -# ============== kbatch ============= - - -class KBatch(Base): - enabled: bool - - -# ============== Monitoring ============= - - -class Monitoring(Base): - enabled: bool - - -# ============== ClearML ============= - - -class ClearML(Base): - enabled: bool - enable_forward_auth: typing.Optional[bool] - overrides: typing.Optional[typing.Dict] - - -# ============== Prefect ============= - - -class Prefect(Base): - enabled: bool - image: typing.Optional[str] - overrides: typing.Optional[typing.Dict] - - -# =========== Conda-Store ============== - - -class CondaStore(Base): - extra_settings: typing.Optional[typing.Dict[str, typing.Any]] = {} - extra_config: typing.Optional[str] = "" - image_tag: typing.Optional[str] = "" - default_namespace: typing.Optional[str] = "" - - -# ============= Terraform =============== - - -class TerraformState(Base): - type: TerraformStateEnum - backend: typing.Optional[str] - config: typing.Optional[typing.Dict[str, str]] - - -# ============ Certificate ============= - - -class Certificate(Base): - type: CertificateEnum - # existing - secret_name: typing.Optional[str] - # lets-encrypt - acme_email: typing.Optional[str] - acme_server: typing.Optional[str] - - -# ========== Default Images ============== - - -class DefaultImages(Base): - jupyterhub: str - jupyterlab: str - dask_worker: str - - -# =========== Authentication ============== - - -class GitHubConfig(Base): - client_id: str - client_secret: str - - -class Auth0Config(Base): - client_id: 
str - client_secret: str - auth0_subdomain: str - - -class Authentication(Base, ABC): - _types: typing.Dict[str, type] = {} - - type: AuthenticationEnum - - # Based on https://github.com/samuelcolvin/pydantic/issues/2177#issuecomment-739578307 - - # This allows type field to determine which subclass of Authentication should be used for validation. - - # Used to register automatically all the submodels in `_types`. - def __init_subclass__(cls): - cls._types[cls._typ.value] = cls - - @classmethod - def __get_validators__(cls): - yield cls.validate - - @classmethod - def validate(cls, value: typing.Dict[str, typing.Any]) -> "Authentication": - if "type" not in value: - raise ValueError("type field is missing from security.authentication") - - specified_type = value.get("type") - sub_class = cls._types.get(specified_type, None) - - if not sub_class: - raise ValueError( - f"No registered Authentication type called {specified_type}" - ) - - # init with right submodel - return sub_class(**value) - - -class PasswordAuthentication(Authentication): - _typ = AuthenticationEnum.password - - -class Auth0Authentication(Authentication): - _typ = AuthenticationEnum.auth0 - config: Auth0Config - - -class GitHubAuthentication(Authentication): - _typ = AuthenticationEnum.github - config: GitHubConfig - - -# ================= Keycloak ================== - - -class Keycloak(Base): - initial_root_password: typing.Optional[str] - overrides: typing.Optional[typing.Dict] - realm_display_name: typing.Optional[str] - - -# ============== Security ================ - - -class Security(Base): - authentication: Authentication - shared_users_group: typing.Optional[bool] - keycloak: typing.Optional[Keycloak] - - -# ================ Providers =============== - - -class KeyValueDict(Base): - key: str - value: str - - -class NodeSelector(Base): - general: KeyValueDict - user: KeyValueDict - worker: KeyValueDict - - -class NodeGroup(Base): - instance: str - min_nodes: int - max_nodes: int - gpu: typing.Optional[bool] = False - guest_accelerators: typing.Optional[typing.List[typing.Dict]] = [] - - class Config: - extra = "allow" - - @validator("guest_accelerators") - def validate_guest_accelerators(cls, v): - if not v: - return v - if not isinstance(v, list): - raise ValueError("guest_accelerators must be a list") - for i in v: - assertion_error_message = """ - In order to successfully use guest accelerators, you must specify the following parameters: - - name (str): Machine type name of the GPU, available at https://cloud.google.com/compute/docs/gpus - count (int): Number of GPUs to attach to the instance - - See general information regarding GPU support at: - # TODO: replace with nebari.dev new URL - https://docs.nebari.dev/en/stable/source/admin_guide/gpu.html?#add-gpu-node-group - """ - try: - assert "name" in i and "count" in i - assert isinstance(i["name"], str) and isinstance(i["count"], int) - except AssertionError: - raise ValueError(assertion_error_message) - - -class AWSNodeGroup(NodeGroup): - single_subnet: typing.Optional[bool] = False - - -class DigitalOceanProvider(Base): - region: str - kubernetes_version: str - node_groups: typing.Dict[str, NodeGroup] - terraform_overrides: typing.Any - - -class GoogleCloudPlatformProvider(Base): - project: str - region: str - zone: typing.Optional[str] # No longer used - availability_zones: typing.Optional[typing.List[str]] # Genuinely optional - kubernetes_version: str - release_channel: typing.Optional[str] - node_groups: typing.Dict[str, NodeGroup] - terraform_overrides: 
typing.Any - - -class AzureProvider(Base): - region: str - kubernetes_version: str - node_groups: typing.Dict[str, NodeGroup] - storage_account_postfix: str - terraform_overrides: typing.Any - - -class AmazonWebServicesProvider(Base): - region: str - availability_zones: typing.Optional[typing.List[str]] - kubernetes_version: str - node_groups: typing.Dict[str, AWSNodeGroup] - terraform_overrides: typing.Any - - -class LocalProvider(Base): - kube_context: typing.Optional[str] - node_selectors: typing.Dict[str, KeyValueDict] - - -class ExistingProvider(Base): - kube_context: typing.Optional[str] - node_selectors: typing.Dict[str, KeyValueDict] - - -# ================= Theme ================== - - -class Theme(Base): - jupyterhub: typing.Dict[str, typing.Union[str, list]] - - -# ================= Theme ================== - - -class JupyterHub(Base): - overrides: typing.Optional[typing.Dict] - - -# ================= JupyterLab ================== - - -class IdleCuller(Base): - terminal_cull_inactive_timeout: typing.Optional[int] - terminal_cull_interval: typing.Optional[int] - kernel_cull_idle_timeout: typing.Optional[int] - kernel_cull_interval: typing.Optional[int] - kernel_cull_connected: typing.Optional[bool] - kernel_cull_busy: typing.Optional[int] - server_shutdown_no_activity_timeout: typing.Optional[int] - - -class JupyterLab(Base): - idle_culler: typing.Optional[IdleCuller] - - -# ================== Profiles ================== - - -class KubeSpawner(Base): - cpu_limit: int - cpu_guarantee: int - mem_limit: str - mem_guarantee: str - image: typing.Optional[str] - - class Config: - extra = "allow" - - -class JupyterLabProfile(Base): - access: AccessEnum = AccessEnum.all - display_name: str - description: str - default: typing.Optional[bool] - users: typing.Optional[typing.List[str]] - groups: typing.Optional[typing.List[str]] - kubespawner_override: typing.Optional[KubeSpawner] - - @root_validator - def only_yaml_can_have_groups_and_users(cls, values): - if values["access"] != AccessEnum.yaml: - if ( - values.get("users", None) is not None - or values.get("groups", None) is not None - ): - raise ValueError( - "Profile must not contain groups or users fields unless access = yaml" - ) - return values - - -class DaskWorkerProfile(Base): - worker_cores_limit: int - worker_cores: int - worker_memory_limit: str - worker_memory: str - image: typing.Optional[str] - - class Config: - extra = "allow" - - -class Profiles(Base): - jupyterlab: typing.List[JupyterLabProfile] - dask_worker: typing.Dict[str, DaskWorkerProfile] - - @validator("jupyterlab") - def check_default(cls, v, values): - """Check if only one default value is present.""" - default = [attrs["default"] for attrs in v if "default" in attrs] - if default.count(True) > 1: - raise TypeError( - "Multiple default Jupyterlab profiles may cause unexpected problems." 
- ) - return v - - -# ================ Environment ================ - - -class CondaEnvironment(Base): - name: str - channels: typing.Optional[typing.List[str]] - dependencies: typing.List[typing.Union[str, typing.Dict[str, typing.List[str]]]] - - -# =============== CDSDashboards ============== - - -class CDSDashboards(Base): - enabled: bool - cds_hide_user_named_servers: typing.Optional[bool] - cds_hide_user_dashboard_servers: typing.Optional[bool] - - -# =============== Extensions = = ============== - - -class NebariExtensionEnv(Base): - name: str - value: str - - -class NebariExtension(Base): - name: str - image: str - urlslug: str - private: bool = False - oauth2client: bool = False - keycloakadmin: bool = False - jwt: bool = False - nebariconfigyaml: bool = False - logout: typing.Optional[str] - envs: typing.Optional[typing.List[NebariExtensionEnv]] - - -class Ingress(Base): - terraform_overrides: typing.Any - - -# ======== External Container Registry ======== - -# This allows the user to set a private AWS ECR as a replacement for -# Docker Hub for some images - those where you provide the full path -# to the image on the ECR. -# extcr_account and extcr_region are the AWS account number and region -# of the ECR respectively. access_key_id and secret_access_key are -# AWS access keys that should have read access to the ECR. - - -class ExtContainerReg(Base): - enabled: bool - access_key_id: typing.Optional[str] - secret_access_key: typing.Optional[str] - extcr_account: typing.Optional[str] - extcr_region: typing.Optional[str] - - @root_validator - def enabled_must_have_fields(cls, values): - if values["enabled"]: - for fldname in ( - "access_key_id", - "secret_access_key", - "extcr_account", - "extcr_region", - ): - if ( - fldname not in values - or values[fldname] is None - or values[fldname].strip() == "" - ): - raise ValueError( - f"external_container_reg must contain a non-blank {fldname} when enabled is true" - ) - return values - - -# ==================== Main =================== -letter_dash_underscore_pydantic = pydantic.constr(regex=namestr_regex) - - -def project_name_convention(value: typing.Any, values): - convention = """ - There are some project naming conventions which need to be followed. - First, ensure your name is compatible with the specific one for - your chosen Cloud provider. In addition, the project name should also obey the following - format requirements: - - Letters from A to Z (upper and lower case) and numbers; - - Maximum accepted length of the name string is 16 characters. - - If using AWS: names should not start with the string "aws"; - - If using Azure: names should not contain "-". 
- """ - if len(value) > 16: - raise ValueError( - "\n".join( - [ - convention, - "Maximum accepted length of the project name string is 16 characters.", - ] - ) - ) - elif values["provider"] == "azure" and ("-" in value): - raise ValueError( - "\n".join( - [convention, "Provider [azure] does not allow '-' in project name."] - ) - ) - elif values["provider"] == "aws" and value.startswith("aws"): - raise ValueError( - "\n".join( - [ - convention, - "Provider [aws] does not allow 'aws' as starting sequence in project name.", - ] - ) - ) - else: - return letter_dash_underscore_pydantic - - -class InitInputs(Base): - cloud_provider: typing.Type[ProviderEnum] = "local" - project_name: str = "" - domain_name: str = "" - namespace: typing.Optional[letter_dash_underscore_pydantic] = "dev" - auth_provider: typing.Type[AuthenticationEnum] = "password" - auth_auto_provision: bool = False - repository: typing.Union[str, None] = None - repository_auto_provision: bool = False - ci_provider: typing.Optional[CiEnum] = None - terraform_state: typing.Optional[TerraformStateEnum] = "remote" - kubernetes_version: typing.Union[str, None] = None - ssl_cert_email: typing.Union[str, None] = None - disable_prompt: bool = False - - -class Main(Base): - provider: ProviderEnum - project_name: str - namespace: typing.Optional[letter_dash_underscore_pydantic] - nebari_version: str = "" - ci_cd: typing.Optional[CICD] - domain: str - terraform_state: typing.Optional[TerraformState] - certificate: Certificate - helm_extensions: typing.Optional[typing.List[HelmExtension]] - prefect: typing.Optional[Prefect] - cdsdashboards: CDSDashboards - security: Security - external_container_reg: typing.Optional[ExtContainerReg] - default_images: DefaultImages - storage: typing.Dict[str, str] - local: typing.Optional[LocalProvider] - existing: typing.Optional[ExistingProvider] - google_cloud_platform: typing.Optional[GoogleCloudPlatformProvider] - amazon_web_services: typing.Optional[AmazonWebServicesProvider] - azure: typing.Optional[AzureProvider] - digital_ocean: typing.Optional[DigitalOceanProvider] - theme: Theme - profiles: Profiles - environments: typing.Dict[str, CondaEnvironment] - conda_store: typing.Optional[CondaStore] - argo_workflows: typing.Optional[ArgoWorkflows] - kbatch: typing.Optional[KBatch] - monitoring: typing.Optional[Monitoring] - clearml: typing.Optional[ClearML] - tf_extensions: typing.Optional[typing.List[NebariExtension]] - jupyterhub: typing.Optional[JupyterHub] - jupyterlab: typing.Optional[JupyterLab] - prevent_deploy: bool = ( - False # Optional, but will be given default value if not present - ) - ingress: typing.Optional[Ingress] - - # If the nebari_version in the schema is old - # we must tell the user to first run nebari upgrade - @validator("nebari_version", pre=True, always=True) - def check_default(cls, v): - """ - Always called even if nebari_version is not supplied at all (so defaults to ''). That way we can give a more helpful error message. - """ - if not cls.is_version_accepted(v): - if v == "": - v = "not supplied" - raise ValueError( - f"nebari_version in the config file must be equivalent to {__version__} to be processed by this version of nebari (your config file version is {v})." - " Install a different version of nebari or run nebari upgrade to ensure your config file is compatible." 
- ) - return v - - @classmethod - def is_version_accepted(cls, v): - return v != "" and rounded_ver_parse(v) == rounded_ver_parse(__version__) - - @validator("project_name") - def _project_name_convention(cls, value: typing.Any, values): - project_name_convention(value=value, values=values) - - -def verify(config): - return Main(**config) - - -def is_version_accepted(v): - """ - Given a version string, return boolean indicating whether - nebari_version in the nebari-config.yaml would be acceptable - for deployment with the current Nebari package. - """ - return Main.is_version_accepted(v) diff --git a/src/_nebari/stages/base.py b/src/_nebari/stages/base.py new file mode 100644 index 000000000..d15e67d21 --- /dev/null +++ b/src/_nebari/stages/base.py @@ -0,0 +1,112 @@ +import contextlib +import inspect +import os +import pathlib +from typing import Any, Dict, List, Tuple + +from _nebari.provider import terraform +from _nebari.stages.tf_objects import NebariTerraformState +from nebari.hookspecs import NebariStage + + +class NebariTerraformStage(NebariStage): + @property + def template_directory(self): + return pathlib.Path(inspect.getfile(self.__class__)).parent / "template" + + @property + def stage_prefix(self): + return pathlib.Path("stages") / self.name + + def state_imports(self) -> List[Tuple[str, str]]: + return [] + + def tf_objects(self) -> List[Dict]: + return [NebariTerraformState(self.name, self.config)] + + def render(self) -> Dict[pathlib.Path, str]: + contents = { + (self.stage_prefix / "_nebari.tf.json"): terraform.tf_render_objects( + self.tf_objects() + ) + } + for root, dirs, filenames in os.walk(self.template_directory): + for filename in filenames: + root_filename = pathlib.Path(root) / filename + with root_filename.open("rb") as f: + contents[ + pathlib.Path( + self.stage_prefix, + pathlib.Path.relative_to( + pathlib.Path(root_filename), self.template_directory + ), + ) + ] = f.read() + return contents + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + return {} + + def set_outputs( + self, stage_outputs: Dict[str, Dict[str, Any]], outputs: Dict[str, Any] + ): + stage_key = "stages/" + self.name + if stage_key not in stage_outputs: + stage_outputs[stage_key] = {**outputs} + else: + stage_outputs[stage_key].update(outputs) + + @contextlib.contextmanager + def deploy(self, stage_outputs: Dict[str, Dict[str, Any]]): + deploy_config = dict( + directory=str(self.output_directory / self.stage_prefix), + input_vars=self.input_vars(stage_outputs), + ) + state_imports = self.state_imports() + if state_imports: + deploy_config["terraform_import"] = True + deploy_config["state_imports"] = state_imports + + self.set_outputs(stage_outputs, terraform.deploy(**deploy_config)) + self.post_deploy(stage_outputs) + yield + + def post_deploy(self, stage_outputs: Dict[str, Dict[str, Any]]): + pass + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + pass + + @contextlib.contextmanager + def destroy( + self, + stage_outputs: Dict[str, Dict[str, Any]], + status: Dict[str, bool], + ignore_errors: bool = True, + ): + self.set_outputs( + stage_outputs, + terraform.deploy( + directory=str(self.output_directory / self.stage_prefix), + input_vars=self.input_vars(stage_outputs), + terraform_init=True, + terraform_import=True, + terraform_apply=False, + terraform_destroy=False, + ), + ) + yield + try: + terraform.deploy( + directory=str(self.output_directory / self.stage_prefix), + input_vars=self.input_vars(stage_outputs), + terraform_init=True, + 
+ terraform_import=True, + terraform_apply=False, + terraform_destroy=True, + ) + status["stages/" + self.name] = True + except terraform.TerraformException as e: + if not ignore_errors: + raise e + status["stages/" + self.name] = False diff --git a/src/_nebari/stages/bootstrap/__init__.py b/src/_nebari/stages/bootstrap/__init__.py new file mode 100644 index 000000000..873ab33de --- /dev/null +++ b/src/_nebari/stages/bootstrap/__init__.py @@ -0,0 +1,112 @@ +import enum +import io +import pathlib +import typing +from inspect import cleandoc +from typing import Dict, List + +from _nebari.provider.cicd.github import gen_nebari_linter, gen_nebari_ops +from _nebari.provider.cicd.gitlab import gen_gitlab_ci +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + + +def gen_gitignore(): + """ + Generate `.gitignore` file. + Add files as needed. + """ + filestoignore = """ + # ignore terraform state + .terraform + terraform.tfstate + terraform.tfstate.backup + .terraform.tfstate.lock.info + + # python + __pycache__ + """ + return {pathlib.Path(".gitignore"): cleandoc(filestoignore)} + + +def gen_cicd(config: schema.Main): + """ + Use cicd schema to generate workflow files based on the + `ci_cd` key in the `config`. + + For more detail on schema: + GitHub-Actions - nebari/providers/cicd/github.py + GitLab-CI - nebari/providers/cicd/gitlab.py + """ + cicd_files = {} + + if config.ci_cd.type == CiEnum.github_actions: + gha_dir = pathlib.Path(".github/workflows/") + cicd_files[gha_dir / "nebari-ops.yaml"] = gen_nebari_ops(config) + cicd_files[gha_dir / "nebari-linter.yaml"] = gen_nebari_linter(config) + + elif config.ci_cd.type == CiEnum.gitlab_ci: + cicd_files[pathlib.Path(".gitlab-ci.yml")] = gen_gitlab_ci(config) + + else: + raise ValueError( + f"The ci_cd provider, {config.ci_cd.type.value}, is not supported. Supported providers include: `github-actions`, `gitlab-ci`."
+ ) + + return cicd_files + + +@schema.yaml_object(schema.yaml) +class CiEnum(str, enum.Enum): + github_actions = "github-actions" + gitlab_ci = "gitlab-ci" + none = "none" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class CICD(schema.Base): + type: CiEnum = CiEnum.none + branch: str = "main" + commit_render: bool = True + before_script: typing.List[typing.Union[str, typing.Dict]] = [] + after_script: typing.List[typing.Union[str, typing.Dict]] = [] + + +class InputSchema(schema.Base): + ci_cd: CICD = CICD() + + +class OutputSchema(schema.Base): + pass + + +class BootstrapStage(NebariStage): + name = "bootstrap" + priority = 0 + + input_schema = InputSchema + output_schema = OutputSchema + + def render(self) -> Dict[str, str]: + contents = {} + if self.config.ci_cd.type != CiEnum.none: + for fn, workflow in gen_cicd(self.config).items(): + stream = io.StringIO() + schema.yaml.dump( + workflow.dict( + by_alias=True, exclude_unset=True, exclude_defaults=True + ), + stream, + ) + contents.update({fn: stream.getvalue()}) + + contents.update(gen_gitignore()) + return contents + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [BootstrapStage] diff --git a/src/_nebari/stages/checks.py b/src/_nebari/stages/checks.py deleted file mode 100644 index 86259846a..000000000 --- a/src/_nebari/stages/checks.py +++ /dev/null @@ -1,315 +0,0 @@ -import socket -import sys -import time - -# check and retry settings -NUM_ATTEMPTS = 10 -TIMEOUT = 10 # seconds - - -def stage_02_infrastructure(stage_outputs, nebari_config): - from kubernetes import client, config - from kubernetes.client.rest import ApiException - - directory = "stages/02-infrastructure" - config.load_kube_config( - config_file=stage_outputs["stages/02-infrastructure"]["kubeconfig_filename"][ - "value" - ] - ) - - try: - api_instance = client.CoreV1Api() - result = api_instance.list_namespace() - except ApiException: - print( - f"ERROR: After stage directory={directory} unable to connect to kubernetes cluster" - ) - sys.exit(1) - - if len(result.items) < 1: - print( - f"ERROR: After stage directory={directory} no nodes provisioned within kubernetes cluster" - ) - sys.exit(1) - - print( - f"After stage directory={directory} kubernetes cluster successfully provisioned" - ) - - -def stage_03_kubernetes_initialize(stage_outputs, nebari_config): - from kubernetes import client, config - from kubernetes.client.rest import ApiException - - directory = "stages/03-kubernetes-initialize" - config.load_kube_config( - config_file=stage_outputs["stages/02-infrastructure"]["kubeconfig_filename"][ - "value" - ] - ) - - try: - api_instance = client.CoreV1Api() - result = api_instance.list_namespace() - except ApiException: - print( - f"ERROR: After stage directory={directory} unable to connect to kubernetes cluster" - ) - sys.exit(1) - - namespaces = {_.metadata.name for _ in result.items} - if nebari_config["namespace"] not in namespaces: - print( - f"ERROR: After stage directory={directory} namespace={config['namespace']} not provisioned within kubernetes cluster" - ) - sys.exit(1) - - print(f"After stage directory={directory} kubernetes initialized successfully") - - -def stage_04_kubernetes_ingress(stage_outputs, nebari_config): - directory = "stages/04-kubernetes-ingress" - - def _attempt_tcp_connect(host, port, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT): - for i in range(num_attempts): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - # normalize hostname to ip 
address - ip = socket.gethostbyname(host) - s.settimeout(5) - result = s.connect_ex((ip, port)) - if result == 0: - print(f"Attempt {i+1} succeeded to connect to tcp://{ip}:{port}") - return True - print(f"Attempt {i+1} failed to connect to tcp tcp://{ip}:{port}") - except socket.gaierror: - print(f"Attempt {i+1} failed to get IP for {host}...") - finally: - s.close() - - time.sleep(timeout) - - return False - - tcp_ports = { - 80, # http - 443, # https - 8022, # jupyterhub-ssh ssh - 8023, # jupyterhub-ssh sftp - 9080, # minio - 8786, # dask-scheduler - } - ip_or_name = stage_outputs[directory]["load_balancer_address"]["value"] - host = ip_or_name["hostname"] or ip_or_name["ip"] - host = host.strip("\n") - - for port in tcp_ports: - if not _attempt_tcp_connect(host, port): - print( - f"ERROR: After stage directory={directory} unable to connect to ingress host={host} port={port}" - ) - sys.exit(1) - - print( - f"After stage directory={directory} kubernetes ingress available on tcp ports={tcp_ports}" - ) - - -def check_ingress_dns(stage_outputs, config, disable_prompt): - directory = "stages/04-kubernetes-ingress" - - ip_or_name = stage_outputs[directory]["load_balancer_address"]["value"] - ip = socket.gethostbyname(ip_or_name["hostname"] or ip_or_name["ip"]) - domain_name = config["domain"] - - def _attempt_dns_lookup( - domain_name, ip, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT - ): - for i in range(num_attempts): - try: - resolved_ip = socket.gethostbyname(domain_name) - if resolved_ip == ip: - print( - f"DNS configured domain={domain_name} matches ingress ip={ip}" - ) - return True - else: - print( - f"Attempt {i+1} polling DNS domain={domain_name} does not match ip={ip} instead got {resolved_ip}" - ) - except socket.gaierror: - print( - f"Attempt {i+1} polling DNS domain={domain_name} record does not exist" - ) - time.sleep(timeout) - return False - - attempt = 0 - while not _attempt_dns_lookup(domain_name, ip): - sleeptime = 60 * (2**attempt) - if not disable_prompt: - input( - f"After attempting to poll the DNS, the record for domain={domain_name} appears not to exist, " - f"has recently been updated, or has yet to fully propagate. This non-deterministic behavior is likely due to " - f"DNS caching and will likely resolve itself in a few minutes.\n\n\tTo poll the DNS again in {sleeptime} seconds " - f"[Press Enter].\n\n...otherwise kill the process and run the deployment again later..." 
- ) - - print(f"Will attempt to poll DNS again in {sleeptime} seconds...") - time.sleep(sleeptime) - attempt += 1 - if attempt == 5: - print( - f"ERROR: After stage directory={directory} DNS domain={domain_name} does not point to ip={ip}" - ) - sys.exit(1) - - -def stage_05_kubernetes_keycloak(stage_outputs, config): - directory = "stages/05-kubernetes-keycloak" - - from keycloak import KeycloakAdmin - from keycloak.exceptions import KeycloakError - - keycloak_url = ( - f"{stage_outputs[directory]['keycloak_credentials']['value']['url']}/auth/" - ) - - def _attempt_keycloak_connection( - keycloak_url, - username, - password, - realm_name, - client_id, - verify=False, - num_attempts=NUM_ATTEMPTS, - timeout=TIMEOUT, - ): - for i in range(num_attempts): - try: - KeycloakAdmin( - keycloak_url, - username=username, - password=password, - realm_name=realm_name, - client_id=client_id, - verify=verify, - ) - print(f"Attempt {i+1} succeeded connecting to keycloak master realm") - return True - except KeycloakError as e: - print(e) - print(f"Attempt {i+1} failed connecting to keycloak master realm") - time.sleep(timeout) - return False - - if not _attempt_keycloak_connection( - keycloak_url, - stage_outputs[directory]["keycloak_credentials"]["value"]["username"], - stage_outputs[directory]["keycloak_credentials"]["value"]["password"], - stage_outputs[directory]["keycloak_credentials"]["value"]["realm"], - stage_outputs[directory]["keycloak_credentials"]["value"]["client_id"], - verify=False, - ): - print( - f"ERROR: unable to connect to keycloak master realm at url={keycloak_url} with root credentials" - ) - sys.exit(1) - - print("Keycloak service successfully started") - - -def stage_06_kubernetes_keycloak_configuration(stage_outputs, config): - directory = "stages/05-kubernetes-keycloak" - - from keycloak import KeycloakAdmin - from keycloak.exceptions import KeycloakError - - keycloak_url = ( - f"{stage_outputs[directory]['keycloak_credentials']['value']['url']}/auth/" - ) - - def _attempt_keycloak_connection( - keycloak_url, - username, - password, - realm_name, - client_id, - nebari_realm, - verify=False, - num_attempts=NUM_ATTEMPTS, - timeout=TIMEOUT, - ): - for i in range(num_attempts): - try: - realm_admin = KeycloakAdmin( - keycloak_url, - username=username, - password=password, - realm_name=realm_name, - client_id=client_id, - verify=verify, - ) - existing_realms = {_["id"] for _ in realm_admin.get_realms()} - if nebari_realm in existing_realms: - print( - f"Attempt {i+1} succeeded connecting to keycloak and nebari realm={nebari_realm} exists" - ) - return True - else: - print( - f"Attempt {i+1} succeeded connecting to keycloak but nebari realm did not exist" - ) - except KeycloakError: - print(f"Attempt {i+1} failed connecting to keycloak master realm") - time.sleep(timeout) - return False - - if not _attempt_keycloak_connection( - keycloak_url, - stage_outputs[directory]["keycloak_credentials"]["value"]["username"], - stage_outputs[directory]["keycloak_credentials"]["value"]["password"], - stage_outputs[directory]["keycloak_credentials"]["value"]["realm"], - stage_outputs[directory]["keycloak_credentials"]["value"]["client_id"], - nebari_realm=stage_outputs["stages/06-kubernetes-keycloak-configuration"][ - "realm_id" - ]["value"], - verify=False, - ): - print( - "ERROR: unable to connect to keycloak master realm and ensure that nebari realm exists" - ) - sys.exit(1) - - print("Keycloak service successfully started with nebari realm") - - -def stage_07_kubernetes_services(stage_outputs, 
config): - directory = "stages/07-kubernetes-services" - import requests - - # suppress insecure warnings - import urllib3 - - urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) - - def _attempt_connect_url( - url, verify=False, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT - ): - for i in range(num_attempts): - response = requests.get(url, verify=verify, timeout=timeout) - if response.status_code < 400: - print(f"Attempt {i+1} health check succeeded for url={url}") - return True - else: - print(f"Attempt {i+1} health check failed for url={url}") - time.sleep(timeout) - return False - - services = stage_outputs[directory]["service_urls"]["value"] - for service_name, service in services.items(): - service_url = service["health_url"] - if service_url and not _attempt_connect_url(service_url): - print(f"ERROR: Service {service_name} DOWN when checking url={service_url}") - sys.exit(1) diff --git a/src/_nebari/stages/infrastructure/__init__.py b/src/_nebari/stages/infrastructure/__init__.py new file mode 100644 index 000000000..70ad37153 --- /dev/null +++ b/src/_nebari/stages/infrastructure/__init__.py @@ -0,0 +1,770 @@ +import contextlib +import inspect +import os +import pathlib +import sys +import tempfile +import typing +from typing import Any, Dict, List, Optional + +import pydantic + +from _nebari import constants +from _nebari.provider import terraform +from _nebari.provider.cloud import ( + amazon_web_services, + azure_cloud, + digital_ocean, + google_cloud, +) +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import NebariTerraformState +from _nebari.utils import modified_environ, random_secure_string +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + + +def get_kubeconfig_filename(): + return str(pathlib.Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG") + + +class LocalInputVars(schema.Base): + kubeconfig_filename: str = get_kubeconfig_filename() + kube_context: Optional[str] + + +class ExistingInputVars(schema.Base): + kube_context: str + + +class DigitalOceanNodeGroup(schema.Base): + instance: str + min_nodes: int + max_nodes: int + + +class DigitalOceanInputVars(schema.Base): + name: str + environment: str + region: str + tags: typing.List[str] + kubernetes_version: str + node_groups: typing.Dict[str, DigitalOceanNodeGroup] + kubeconfig_filename: str = get_kubeconfig_filename() + + +class GCPGuestAccelerators(schema.Base): + type: str + count: int + + +class GCPNodeGroupInputVars(schema.Base): + name: str + instance_type: str + min_size: int + max_size: int + labels: Dict[str, str] + preemptible: bool + guest_accelerators: List[GCPGuestAccelerators] + + +class GCPPrivateClusterConfig(schema.Base): + enable_private_nodes: bool + enable_private_endpoint: bool + master_ipv4_cidr_block: str + + +class GCPInputVars(schema.Base): + name: str + environment: str + region: str + project_id: str + availability_zones: List[str] + node_groups: List[GCPNodeGroupInputVars] + kubeconfig_filename: str = get_kubeconfig_filename() + tags: List[str] + kubernetes_version: str + release_channel: str + networking_mode: str + network: str + subnetwork: str = None + ip_allocation_policy: Dict[str, str] = None + master_authorized_networks_config: Dict[str, str] = None + private_cluster_config: GCPPrivateClusterConfig = None + + +class AzureNodeGroupInputVars(schema.Base): + instance: str + min_nodes: int + max_nodes: int + + +class AzureInputVars(schema.Base): + name: str + environment: str + region: str + 
kubeconfig_filename: str = get_kubeconfig_filename() + kubernetes_version: str + node_groups: Dict[str, AzureNodeGroupInputVars] + resource_group_name: str + node_resource_group_name: str + vnet_subnet_id: str = None + private_cluster_enabled: bool + + +class AWSNodeGroupInputVars(schema.Base): + name: str + instance_type: str + gpu: bool = False + min_size: int + desired_size: int + max_size: int + single_subnet: bool + + +class AWSInputVars(schema.Base): + name: str + environment: str + existing_security_group_id: str = None + existing_subnet_ids: List[str] = None + region: str + kubernetes_version: str + node_groups: List[AWSNodeGroupInputVars] + availability_zones: List[str] + vpc_cidr_block: str + kubeconfig_filename: str = get_kubeconfig_filename() + + +def _calculate_node_groups(config: schema.Main): + if config.provider == schema.ProviderEnum.aws: + return { + group: {"key": "eks.amazonaws.com/nodegroup", "value": group} + for group in ["general", "user", "worker"] + } + elif config.provider == schema.ProviderEnum.gcp: + return { + group: {"key": "cloud.google.com/gke-nodepool", "value": group} + for group in ["general", "user", "worker"] + } + elif config.provider == schema.ProviderEnum.azure: + return { + group: {"key": "azure-node-pool", "value": group} + for group in ["general", "user", "worker"] + } + elif config.provider == schema.ProviderEnum.do: + return { + group: {"key": "doks.digitalocean.com/node-pool", "value": group} + for group in ["general", "user", "worker"] + } + elif config.provider == schema.ProviderEnum.existing: + return config.existing.node_selectors + else: + return config.local.dict()["node_selectors"] + + +@contextlib.contextmanager +def kubernetes_provider_context(kubernetes_credentials: Dict[str, str]): + credential_mapping = { + "config_path": "KUBE_CONFIG_PATH", + "config_context": "KUBE_CTX", + "username": "KUBE_USER", + "password": "KUBE_PASSWORD", + "client_certificate": "KUBE_CLIENT_CERT_DATA", + "client_key": "KUBE_CLIENT_KEY_DATA", + "cluster_ca_certificate": "KUBE_CLUSTER_CA_CERT_DATA", + "host": "KUBE_HOST", + "token": "KUBE_TOKEN", + } + + credentials = { + credential_mapping[k]: v + for k, v in kubernetes_credentials.items() + if v is not None + } + with modified_environ(**credentials): + yield + + +class KeyValueDict(schema.Base): + key: str + value: str + + +class DigitalOceanNodeGroup(schema.Base): + """Representation of a node group with Digital Ocean + + - Kubernetes limits: https://docs.digitalocean.com/products/kubernetes/details/limits/ + - Available instance types: https://slugs.do-api.dev/ + """ + + instance: str + min_nodes: pydantic.conint(ge=1) = 1 + max_nodes: pydantic.conint(ge=1) = 1 + + +class DigitalOceanProvider(schema.Base): + region: str = "nyc3" + kubernetes_version: typing.Optional[str] + # Digital Ocean image slugs are listed here https://slugs.do-api.dev/ + node_groups: typing.Dict[str, DigitalOceanNodeGroup] = { + "general": DigitalOceanNodeGroup( + instance="g-8vcpu-32gb", min_nodes=1, max_nodes=1 + ), + "user": DigitalOceanNodeGroup( + instance="g-4vcpu-16gb", min_nodes=1, max_nodes=5 + ), + "worker": DigitalOceanNodeGroup( + instance="g-4vcpu-16gb", min_nodes=1, max_nodes=5 + ), + } + tags: typing.Optional[typing.List[str]] = [] + + @pydantic.validator("region") + def _validate_region(cls, value): + digital_ocean.check_credentials() + + available_regions = set(_["slug"] for _ in digital_ocean.regions()) + if value not in available_regions: + raise ValueError( + f"Digital Ocean region={value} is not one of 
{available_regions}" + ) + return value + + @pydantic.validator("node_groups") + def _validate_node_group(cls, value): + digital_ocean.check_credentials() + + available_instances = {_["slug"] for _ in digital_ocean.instances()} + for name, node_group in value.items(): + if node_group.instance not in available_instances: + raise ValueError( + f"Digital Ocean instance {node_group.instance} not one of available instance types={available_instances}" + ) + + return value + + @pydantic.root_validator + def _validate_kubernetes_version(cls, values): + digital_ocean.check_credentials() + + if "region" not in values: + raise ValueError("Region required in order to set kubernetes_version") + + available_kubernetes_versions = digital_ocean.kubernetes_versions( + values["region"] + ) + if ( + values["kubernetes_version"] is not None + and values["kubernetes_version"] not in available_kubernetes_versions + ): + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + ) + else: + values["kubernetes_version"] = available_kubernetes_versions[-1] + return values + + +class GCPIPAllocationPolicy(schema.Base): + cluster_secondary_range_name: str + services_secondary_range_name: str + cluster_ipv4_cidr_block: str + services_ipv4_cidr_block: str + + +class GCPCIDRBlock(schema.Base): + cidr_block: str + display_name: str + + +class GCPMasterAuthorizedNetworksConfig(schema.Base): + cidr_blocks: typing.List[GCPCIDRBlock] + + +class GCPPrivateClusterConfig(schema.Base): + enable_private_endpoint: bool + enable_private_nodes: bool + master_ipv4_cidr_block: str + + +class GCPGuestAccelerator(schema.Base): + """ + See general information regarding GPU support at: + # TODO: replace with nebari.dev new URL + https://docs.nebari.dev/en/stable/source/admin_guide/gpu.html?#add-gpu-node-group + """ + + name: str + count: pydantic.conint(ge=1) = 1 + + +class GCPNodeGroup(schema.Base): + instance: str + min_nodes: pydantic.conint(ge=0) = 0 + max_nodes: pydantic.conint(ge=1) = 1 + preemptible: bool = False + labels: typing.Dict[str, str] = {} + guest_accelerators: typing.List[GCPGuestAccelerator] = [] + + +class GoogleCloudPlatformProvider(schema.Base): + project: str = pydantic.Field(default_factory=lambda: os.environ["PROJECT_ID"]) + region: str = "us-central1" + availability_zones: typing.Optional[typing.List[str]] = [] + kubernetes_version: typing.Optional[str] + release_channel: str = constants.DEFAULT_GKE_RELEASE_CHANNEL + node_groups: typing.Dict[str, GCPNodeGroup] = { + "general": GCPNodeGroup(instance="n1-standard-8", min_nodes=1, max_nodes=1), + "user": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), + "worker": GCPNodeGroup(instance="n1-standard-4", min_nodes=0, max_nodes=5), + } + tags: typing.Optional[typing.List[str]] = [] + networking_mode: str = "ROUTE" + network: str = "default" + subnetwork: typing.Optional[typing.Union[str, None]] = None + ip_allocation_policy: typing.Optional[ + typing.Union[GCPIPAllocationPolicy, None] + ] = None + master_authorized_networks_config: typing.Optional[ + typing.Union[GCPCIDRBlock, None] + ] = None + private_cluster_config: typing.Optional[ + typing.Union[GCPPrivateClusterConfig, None] + ] = None + + @pydantic.root_validator + def _validate_kubernetes_version(cls, values): + google_cloud.check_credentials() + + available_kubernetes_versions = 
google_cloud.kubernetes_versions( + values["region"] + ) + if ( + values["kubernetes_version"] is not None + and values["kubernetes_version"] not in available_kubernetes_versions + ): + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + ) + else: + values["kubernetes_version"] = available_kubernetes_versions[-1] + return values + + +class AzureNodeGroup(schema.Base): + instance: str + min_nodes: int + max_nodes: int + + +class AzureProvider(schema.Base): + region: str = "Central US" + kubernetes_version: typing.Optional[str] + node_groups: typing.Dict[str, AzureNodeGroup] = { + "general": AzureNodeGroup(instance="Standard_D8_v3", min_nodes=1, max_nodes=1), + "user": AzureNodeGroup(instance="Standard_D4_v3", min_nodes=0, max_nodes=5), + "worker": AzureNodeGroup(instance="Standard_D4_v3", min_nodes=0, max_nodes=5), + } + storage_account_postfix: str = pydantic.Field( + default_factory=lambda: random_secure_string(length=4) + ) + vnet_subnet_id: typing.Optional[typing.Union[str, None]] = None + private_cluster_enabled: bool = False + + @pydantic.validator("kubernetes_version") + def _validate_kubernetes_version(cls, value): + azure_cloud.check_credentials() + + available_kubernetes_versions = azure_cloud.kubernetes_versions() + if value is None: + value = available_kubernetes_versions[-1] + elif value not in available_kubernetes_versions: + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {value}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." + ) + return value + + +class AWSNodeGroup(schema.Base): + instance: str + min_nodes: int = 0 + max_nodes: int + gpu: bool = False + single_subnet: bool = False + + +class AmazonWebServicesProvider(schema.Base): + region: str = pydantic.Field( + default_factory=lambda: os.environ.get("AWS_DEFAULT_REGION", "us-west-2") + ) + availability_zones: typing.Optional[typing.List[str]] + kubernetes_version: typing.Optional[str] + node_groups: typing.Dict[str, AWSNodeGroup] = { + "general": AWSNodeGroup(instance="m5.2xlarge", min_nodes=1, max_nodes=1), + "user": AWSNodeGroup( + instance="m5.xlarge", min_nodes=1, max_nodes=5, single_subnet=False + ), + "worker": AWSNodeGroup( + instance="m5.xlarge", min_nodes=1, max_nodes=5, single_subnet=False + ), + } + existing_subnet_ids: typing.List[str] = None + existing_security_group_ids: str = None + vpc_cidr_block: str = "10.10.0.0/16" + + @pydantic.root_validator + def _validate_kubernetes_version(cls, values): + amazon_web_services.check_credentials() + + available_kubernetes_versions = amazon_web_services.kubernetes_versions( + values["region"] + ) + if values["kubernetes_version"] is None: + values["kubernetes_version"] = available_kubernetes_versions[-1] + elif values["kubernetes_version"] not in available_kubernetes_versions: + raise ValueError( + f"\nInvalid `kubernetes-version` provided: {values['kubernetes_version']}.\nPlease select from one of the following supported Kubernetes versions: {available_kubernetes_versions} or omit flag to use latest Kubernetes version available." 
+ ) + return values + + @pydantic.validator("node_groups") + def _validate_node_group(cls, value, values): + amazon_web_services.check_credentials() + + available_instances = amazon_web_services.instances(values["region"]) + for name, node_group in value.items(): + if node_group.instance not in available_instances: + raise ValueError( + f"Instance {node_group.instance} not available out of available instances {available_instances.keys()}" + ) + return value + + @pydantic.validator("region") + def _validate_region(cls, value): + amazon_web_services.check_credentials() + + available_regions = amazon_web_services.regions() + if value not in available_regions: + raise ValueError( + f"Region {value} is not one of available regions {available_regions}" + ) + return value + + @pydantic.root_validator + def _validate_availability_zones(cls, values): + amazon_web_services.check_credentials() + + if values["availability_zones"] is None: + zones = amazon_web_services.zones(values["region"]) + values["availability_zones"] = list(sorted(zones))[:2] + return values + + +class LocalProvider(schema.Base): + kube_context: typing.Optional[str] + node_selectors: typing.Dict[str, KeyValueDict] = { + "general": KeyValueDict(key="kubernetes.io/os", value="linux"), + "user": KeyValueDict(key="kubernetes.io/os", value="linux"), + "worker": KeyValueDict(key="kubernetes.io/os", value="linux"), + } + + +class ExistingProvider(schema.Base): + kube_context: typing.Optional[str] + node_selectors: typing.Dict[str, KeyValueDict] = { + "general": KeyValueDict(key="kubernetes.io/os", value="linux"), + "user": KeyValueDict(key="kubernetes.io/os", value="linux"), + "worker": KeyValueDict(key="kubernetes.io/os", value="linux"), + } + + +class InputSchema(schema.Base): + local: typing.Optional[LocalProvider] + existing: typing.Optional[ExistingProvider] + google_cloud_platform: typing.Optional[GoogleCloudPlatformProvider] + amazon_web_services: typing.Optional[AmazonWebServicesProvider] + azure: typing.Optional[AzureProvider] + digital_ocean: typing.Optional[DigitalOceanProvider] + + @pydantic.root_validator + def check_provider(cls, values): + if ( + values["provider"] == schema.ProviderEnum.local + and values.get("local") is None + ): + values["local"] = LocalProvider() + elif ( + values["provider"] == schema.ProviderEnum.existing + and values.get("existing") is None + ): + values["existing"] = ExistingProvider() + elif ( + values["provider"] == schema.ProviderEnum.gcp + and values.get("google_cloud_platform") is None + ): + values["google_cloud_platform"] = GoogleCloudPlatformProvider() + elif ( + values["provider"] == schema.ProviderEnum.aws + and values.get("amazon_web_services") is None + ): + values["amazon_web_services"] = AmazonWebServicesProvider() + elif ( + values["provider"] == schema.ProviderEnum.azure + and values.get("azure") is None + ): + values["azure"] = AzureProvider() + elif ( + values["provider"] == schema.ProviderEnum.do + and values.get("digital_ocean") is None + ): + values["digital_ocean"] = DigitalOceanProvider() + + if ( + sum( + (_ in values and values[_] is not None) + for _ in { + "local", + "existing", + "google_cloud_platform", + "amazon_web_services", + "azure", + "digital_ocean", + } + ) + != 1 + ): + raise ValueError("multiple providers set or wrong provider fields set") + return values + + +class NodeSelectorKeyValue(schema.Base): + key: str + value: str + + +class KubernetesCredentials(schema.Base): + host: str + cluster_ca_certifiate: str + token: typing.Optional[str] + username: 
typing.Optional[str]
+    password: typing.Optional[str]
+    client_certificate: typing.Optional[str]
+    client_key: typing.Optional[str]
+    config_path: typing.Optional[str]
+    config_context: typing.Optional[str]
+
+
+class OutputSchema(schema.Base):
+    node_selectors: Dict[str, NodeSelectorKeyValue]
+    kubernetes_credentials: KubernetesCredentials
+    kubeconfig_filename: str
+    nfs_endpoint: typing.Optional[str]
+
+
+class KubernetesInfrastructureStage(NebariTerraformStage):
+    """Generalized method to provision infrastructure.
+
+    After successful deployment the following properties are set on
+    `stage_outputs[directory]`.
+      - `kubernetes_credentials` which are sufficient credentials to
+        connect with the kubernetes provider
+      - `kubeconfig_filename` which is a path to a kubeconfig that can
+        be used to connect to a kubernetes cluster
+      - at least one node running such that resources in the
+        node_group.general can be scheduled
+
+    At a high level this stage is expected to provision a kubernetes
+    cluster on a given provider.
+    """
+
+    name = "02-infrastructure"
+    priority = 20
+
+    input_schema = InputSchema
+    output_schema = OutputSchema
+
+    @property
+    def template_directory(self):
+        return (
+            pathlib.Path(inspect.getfile(self.__class__)).parent
+            / "template"
+            / self.config.provider.value
+        )
+
+    @property
+    def stage_prefix(self):
+        return pathlib.Path("stages") / self.name / self.config.provider.value
+
+    def tf_objects(self) -> List[Dict]:
+        if self.config.provider == schema.ProviderEnum.gcp:
+            return [
+                terraform.Provider(
+                    "google",
+                    project=self.config.google_cloud_platform.project,
+                    region=self.config.google_cloud_platform.region,
+                ),
+                NebariTerraformState(self.name, self.config),
+            ]
+        elif self.config.provider == schema.ProviderEnum.do:
+            return [
+                NebariTerraformState(self.name, self.config),
+            ]
+        elif self.config.provider == schema.ProviderEnum.azure:
+            return [
+                NebariTerraformState(self.name, self.config),
+            ]
+        elif self.config.provider == schema.ProviderEnum.aws:
+            return [
+                terraform.Provider(
+                    "aws", region=self.config.amazon_web_services.region
+                ),
+                NebariTerraformState(self.name, self.config),
+            ]
+        else:
+            return []
+
+    def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]):
+        if self.config.provider == schema.ProviderEnum.local:
+            return LocalInputVars(kube_context=self.config.local.kube_context).dict()
+        elif self.config.provider == schema.ProviderEnum.existing:
+            return ExistingInputVars(
+                kube_context=self.config.existing.kube_context
+            ).dict()
+        elif self.config.provider == schema.ProviderEnum.do:
+            return DigitalOceanInputVars(
+                name=self.config.escaped_project_name,
+                environment=self.config.namespace,
+                region=self.config.digital_ocean.region,
+                tags=self.config.digital_ocean.tags,
+                kubernetes_version=self.config.digital_ocean.kubernetes_version,
+                node_groups=self.config.digital_ocean.node_groups,
+            ).dict()
+        elif self.config.provider == schema.ProviderEnum.gcp:
+            return GCPInputVars(
+                name=self.config.escaped_project_name,
+                environment=self.config.namespace,
+                region=self.config.google_cloud_platform.region,
+                project_id=self.config.google_cloud_platform.project,
+                availability_zones=self.config.google_cloud_platform.availability_zones,
+                node_groups=[
+                    GCPNodeGroupInputVars(
+                        name=name,
+                        labels=node_group.labels,
+                        instance_type=node_group.instance,
+                        min_size=node_group.min_nodes,
+                        max_size=node_group.max_nodes,
+                        preemptible=node_group.preemptible,
+                        guest_accelerators=node_group.guest_accelerators,
+                    )
+                    for name, node_group in self.config.google_cloud_platform.node_groups.items()
+                ],
+                tags=self.config.google_cloud_platform.tags,
+                kubernetes_version=self.config.google_cloud_platform.kubernetes_version,
+                release_channel=self.config.google_cloud_platform.release_channel,
+                networking_mode=self.config.google_cloud_platform.networking_mode,
+                network=self.config.google_cloud_platform.network,
+                subnetwork=self.config.google_cloud_platform.subnetwork,
+                ip_allocation_policy=self.config.google_cloud_platform.ip_allocation_policy,
+                master_authorized_networks_config=self.config.google_cloud_platform.master_authorized_networks_config,
+                private_cluster_config=self.config.google_cloud_platform.private_cluster_config,
+            ).dict()
+        elif self.config.provider == schema.ProviderEnum.azure:
+            return AzureInputVars(
+                name=self.config.escaped_project_name,
+                environment=self.config.namespace,
+                region=self.config.azure.region,
+                kubernetes_version=self.config.azure.kubernetes_version,
+                node_groups={
+                    name: AzureNodeGroupInputVars(
+                        instance=node_group.instance,
+                        min_nodes=node_group.min_nodes,
+                        max_nodes=node_group.max_nodes,
+                    )
+                    for name, node_group in self.config.azure.node_groups.items()
+                },
+                resource_group_name=f"{self.config.project_name}-{self.config.namespace}",
+                node_resource_group_name=f"{self.config.project_name}-{self.config.namespace}-node-resource-group",
+                vnet_subnet_id=self.config.azure.vnet_subnet_id,
+                private_cluster_enabled=self.config.azure.private_cluster_enabled,
+            ).dict()
+        elif self.config.provider == schema.ProviderEnum.aws:
+            return AWSInputVars(
+                name=self.config.escaped_project_name,
+                environment=self.config.namespace,
+                existing_subnet_ids=self.config.amazon_web_services.existing_subnet_ids,
+                existing_security_group_id=self.config.amazon_web_services.existing_security_group_ids,
+                region=self.config.amazon_web_services.region,
+                kubernetes_version=self.config.amazon_web_services.kubernetes_version,
+                node_groups=[
+                    AWSNodeGroupInputVars(
+                        name=name,
+                        instance_type=node_group.instance,
+                        gpu=node_group.gpu,
+                        min_size=node_group.min_nodes,
+                        desired_size=node_group.min_nodes,
+                        max_size=node_group.max_nodes,
+                        single_subnet=node_group.single_subnet,
+                    )
+                    for name, node_group in self.config.amazon_web_services.node_groups.items()
+                ],
+                availability_zones=self.config.amazon_web_services.availability_zones,
+                vpc_cidr_block=self.config.amazon_web_services.vpc_cidr_block,
+            ).dict()
+        else:
+            raise ValueError(f"Unknown provider: {self.config.provider}")
+
+    def check(self, stage_outputs: Dict[str, Dict[str, Any]]):
+        from kubernetes import client, config
+        from kubernetes.client.rest import ApiException
+
+        config.load_kube_config(
+            config_file=stage_outputs["stages/02-infrastructure"][
+                "kubeconfig_filename"
+            ]["value"]
+        )
+
+        try:
+            api_instance = client.CoreV1Api()
+            result = api_instance.list_namespace()
+        except ApiException:
+            print(
+                f"ERROR: After stage={self.name} unable to connect to kubernetes cluster"
+            )
+            sys.exit(1)
+
+        if len(result.items) < 1:
+            print(
+                f"ERROR: After stage={self.name} no nodes provisioned within kubernetes cluster"
+            )
+            sys.exit(1)
+
+        print(f"After stage={self.name} kubernetes cluster successfully provisioned")
+
+    def set_outputs(
+        self, stage_outputs: Dict[str, Dict[str, Any]], outputs: Dict[str, Any]
+    ):
+        outputs["node_selectors"] = _calculate_node_groups(self.config)
+        super().set_outputs(stage_outputs, outputs)
+
+    @contextlib.contextmanager
+    def deploy(self, stage_outputs: Dict[str, Dict[str, Any]]):
+        with super().deploy(stage_outputs):
+            with kubernetes_provider_context(
+                stage_outputs["stages/" + self.name]["kubernetes_credentials"]["value"]
+            ):
+                yield
+
+    @contextlib.contextmanager
+    def destroy(
+        self, stage_outputs: Dict[str, Dict[str, Any]], status: Dict[str, bool]
+    ):
+        with super().destroy(stage_outputs, status):
+            with kubernetes_provider_context(
+                stage_outputs["stages/" + self.name]["kubernetes_credentials"]["value"]
+            ):
+                yield
+
+
+@hookimpl
+def nebari_stage() -> List[NebariStage]:
+    return [KubernetesInfrastructureStage]
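The `deploy`/`destroy` overrides above are how later stages inherit cluster access: the base class's Terraform apply runs first, then `kubernetes_provider_context` exports the resulting credentials as `KUBE_*` environment variables (for example `KUBE_HOST`, `KUBE_TOKEN`) for as long as the nested stages are running. As a rough editorial illustration only, not part of this patch, a deploy driver could nest the per-stage context managers as sketched below; the `stages` list and pre-populated `stage_outputs` dict are hypothetical stand-ins rather than the actual nebari deploy code.

```python
# Editor's sketch (not patch content): nesting per-stage deploy() context managers.
import contextlib


def deploy_all(stages, stage_outputs):
    """Enter each stage's deploy() in order and keep every context open.

    Each deploy() applies its Terraform on entry and keeps its exported
    credentials (e.g. KUBE_HOST, KUBE_TOKEN) in the environment while the
    remaining stages run; contexts unwind in reverse order on exit.
    """
    with contextlib.ExitStack() as stack:
        for stage in stages:  # hypothetical: instantiated stages in priority order
            stack.enter_context(stage.deploy(stage_outputs))
            stage.check(stage_outputs)
```

Using context managers here means teardown (unsetting the `KUBE_*` variables and any base-class cleanup) happens automatically even when a later stage fails.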
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/locals.tf b/src/_nebari/stages/infrastructure/template/aws/locals.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/locals.tf
rename to src/_nebari/stages/infrastructure/template/aws/locals.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/main.tf b/src/_nebari/stages/infrastructure/template/aws/main.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/main.tf
rename to src/_nebari/stages/infrastructure/template/aws/main.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/accounting/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/accounting/main.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/accounting/main.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/accounting/main.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/accounting/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/accounting/variables.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/accounting/variables.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/accounting/variables.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/efs/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/efs/main.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/efs/main.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/efs/main.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/efs/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/efs/outputs.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/efs/outputs.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/efs/outputs.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/efs/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/efs/variables.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/efs/variables.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/efs/variables.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kafka/main.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/main.tf
rename to src/_nebari/stages/infrastructure/template/aws/modules/kafka/main.tf
diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kafka/outputs.tf
similarity index 100%
rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/outputs.tf
rename to
src/_nebari/stages/infrastructure/template/aws/modules/kafka/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kafka/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kafka/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kafka/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/autoscaling.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/autoscaling.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/autoscaling.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/autoscaling.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/locals.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/locals.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/locals.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/locals.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/policy.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/policy.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/policy.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/policy.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/kubernetes/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/kubernetes/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/network/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/network/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/network/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/network/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/network/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/network/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/network/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/network/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/network/variables.tf 
similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/network/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/network/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/permissions/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/permissions/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/permissions/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/permissions/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/permissions/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/permissions/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/permissions/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/rds/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/rds/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/rds/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/rds/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/rds/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/rds/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/rds/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/rds/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/rds/users.tf b/src/_nebari/stages/infrastructure/template/aws/modules/rds/users.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/rds/users.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/rds/users.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/rds/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/rds/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/rds/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/rds/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/registry/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/registry/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/registry/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/registry/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/registry/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/registry/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/registry/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/registry/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/registry/variables.tf 
b/src/_nebari/stages/infrastructure/template/aws/modules/registry/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/registry/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/registry/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/s3/main.tf b/src/_nebari/stages/infrastructure/template/aws/modules/s3/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/s3/main.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/s3/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/s3/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/modules/s3/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/s3/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/s3/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/modules/s3/variables.tf b/src/_nebari/stages/infrastructure/template/aws/modules/s3/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/modules/s3/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/modules/s3/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/outputs.tf b/src/_nebari/stages/infrastructure/template/aws/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/outputs.tf rename to src/_nebari/stages/infrastructure/template/aws/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/aws/variables.tf b/src/_nebari/stages/infrastructure/template/aws/variables.tf similarity index 92% rename from src/_nebari/template/stages/02-infrastructure/aws/variables.tf rename to src/_nebari/stages/infrastructure/template/aws/variables.tf index a9ab302f1..593827d0a 100644 --- a/src/_nebari/template/stages/02-infrastructure/aws/variables.tf +++ b/src/_nebari/stages/infrastructure/template/aws/variables.tf @@ -11,13 +11,11 @@ variable "environment" { variable "existing_subnet_ids" { description = "Existing VPC ID to use for Kubernetes resources" type = list(string) - default = null } variable "existing_security_group_id" { description = "Existing security group ID to use for Kubernetes resources" type = string - default = null } variable "region" { @@ -46,19 +44,16 @@ variable "node_groups" { variable "availability_zones" { description = "AWS availability zones within AWS region" type = list(string) - default = [] } variable "vpc_cidr_block" { description = "VPC cidr block for infastructure" type = string - default = "10.10.0.0/16" } variable "kubeconfig_filename" { description = "Kubernetes kubeconfig written to filesystem" type = string - default = null } variable "eks_endpoint_private_access" { diff --git a/src/_nebari/template/stages/02-infrastructure/aws/versions.tf b/src/_nebari/stages/infrastructure/template/aws/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/aws/versions.tf rename to src/_nebari/stages/infrastructure/template/aws/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/main.tf b/src/_nebari/stages/infrastructure/template/azure/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/main.tf rename to src/_nebari/stages/infrastructure/template/azure/main.tf diff --git 
a/src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/main.tf rename to src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/outputs.tf rename to src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/modules/kubernetes/variables.tf rename to src/_nebari/stages/infrastructure/template/azure/modules/kubernetes/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/modules/registry/main.tf b/src/_nebari/stages/infrastructure/template/azure/modules/registry/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/modules/registry/main.tf rename to src/_nebari/stages/infrastructure/template/azure/modules/registry/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/modules/registry/variables.tf b/src/_nebari/stages/infrastructure/template/azure/modules/registry/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/modules/registry/variables.tf rename to src/_nebari/stages/infrastructure/template/azure/modules/registry/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/outputs.tf b/src/_nebari/stages/infrastructure/template/azure/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/outputs.tf rename to src/_nebari/stages/infrastructure/template/azure/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/providers.tf b/src/_nebari/stages/infrastructure/template/azure/providers.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/providers.tf rename to src/_nebari/stages/infrastructure/template/azure/providers.tf diff --git a/src/_nebari/template/stages/02-infrastructure/azure/variables.tf b/src/_nebari/stages/infrastructure/template/azure/variables.tf similarity index 97% rename from src/_nebari/template/stages/02-infrastructure/azure/variables.tf rename to src/_nebari/stages/infrastructure/template/azure/variables.tf index bcbefa688..9616bf2b0 100644 --- a/src/_nebari/template/stages/02-infrastructure/azure/variables.tf +++ b/src/_nebari/stages/infrastructure/template/azure/variables.tf @@ -30,7 +30,6 @@ variable "node_groups" { variable "kubeconfig_filename" { description = "Kubernetes kubeconfig written to filesystem" type = string - default = null } variable "resource_group_name" { @@ -46,7 +45,6 @@ variable "node_resource_group_name" { variable "vnet_subnet_id" { description = "The ID of a Subnet where the Kubernetes Node Pool should exist. Changing this forces a new resource to be created." 
type = string - default = null } variable "private_cluster_enabled" { diff --git a/src/_nebari/template/stages/02-infrastructure/azure/versions.tf b/src/_nebari/stages/infrastructure/template/azure/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/azure/versions.tf rename to src/_nebari/stages/infrastructure/template/azure/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/main.tf b/src/_nebari/stages/infrastructure/template/do/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/main.tf rename to src/_nebari/stages/infrastructure/template/do/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/locals.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/locals.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/locals.tf rename to src/_nebari/stages/infrastructure/template/do/modules/kubernetes/locals.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/main.tf rename to src/_nebari/stages/infrastructure/template/do/modules/kubernetes/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/outputs.tf rename to src/_nebari/stages/infrastructure/template/do/modules/kubernetes/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/variables.tf rename to src/_nebari/stages/infrastructure/template/do/modules/kubernetes/variables.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/spaces/versions.tf b/src/_nebari/stages/infrastructure/template/do/modules/kubernetes/versions.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/modules/spaces/versions.tf rename to src/_nebari/stages/infrastructure/template/do/modules/kubernetes/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/registry/main.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/registry/main.tf rename to src/_nebari/stages/infrastructure/template/do/modules/registry/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/registry/variable.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/variable.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/registry/variable.tf rename to src/_nebari/stages/infrastructure/template/do/modules/registry/variable.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/versions.tf b/src/_nebari/stages/infrastructure/template/do/modules/registry/versions.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/versions.tf rename to 
src/_nebari/stages/infrastructure/template/do/modules/registry/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/outputs.tf b/src/_nebari/stages/infrastructure/template/do/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/outputs.tf rename to src/_nebari/stages/infrastructure/template/do/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/providers.tf b/src/_nebari/stages/infrastructure/template/do/providers.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/providers.tf rename to src/_nebari/stages/infrastructure/template/do/providers.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/variables.tf b/src/_nebari/stages/infrastructure/template/do/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/variables.tf rename to src/_nebari/stages/infrastructure/template/do/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/versions.tf b/src/_nebari/stages/infrastructure/template/do/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/kubernetes/versions.tf rename to src/_nebari/stages/infrastructure/template/do/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/existing/main.tf b/src/_nebari/stages/infrastructure/template/existing/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/existing/main.tf rename to src/_nebari/stages/infrastructure/template/existing/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/main.tf b/src/_nebari/stages/infrastructure/template/gcp/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/main.tf rename to src/_nebari/stages/infrastructure/template/gcp/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/locals.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/locals.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/locals.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/locals.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/main.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/main.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/outputs.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/outputs.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/service_account.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/service_account.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/service_account.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/service_account.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/templates/kubeconfig.yaml 
b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/templates/kubeconfig.yaml similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/templates/kubeconfig.yaml rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/templates/kubeconfig.yaml diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/variables.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/kubernetes/variables.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/kubernetes/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/network/main.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/network/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/network/main.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/network/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/network/variables.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/network/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/network/variables.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/network/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/registry/main.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/registry/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/registry/main.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/registry/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/modules/registry/variables.tf b/src/_nebari/stages/infrastructure/template/gcp/modules/registry/variables.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/modules/registry/variables.tf rename to src/_nebari/stages/infrastructure/template/gcp/modules/registry/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/outputs.tf b/src/_nebari/stages/infrastructure/template/gcp/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/outputs.tf rename to src/_nebari/stages/infrastructure/template/gcp/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/provider.tf b/src/_nebari/stages/infrastructure/template/gcp/provider.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/provider.tf rename to src/_nebari/stages/infrastructure/template/gcp/provider.tf diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/variables.tf b/src/_nebari/stages/infrastructure/template/gcp/variables.tf similarity index 90% rename from src/_nebari/template/stages/02-infrastructure/gcp/variables.tf rename to src/_nebari/stages/infrastructure/template/gcp/variables.tf index 18607c7b6..e89f82003 100644 --- a/src/_nebari/template/stages/02-infrastructure/gcp/variables.tf +++ b/src/_nebari/stages/infrastructure/template/gcp/variables.tf @@ -21,25 +21,21 @@ variable "project_id" { variable "availability_zones" { description = "Availability zones to use for nebari deployment" type = list(string) - default = [] } variable "node_groups" { description = "GCP node groups" type = any - default = null } variable "kubeconfig_filename" { description = "Kubernetes kubeconfig 
written to filesystem" type = string - default = null } variable "tags" { description = "Google Cloud Platform tags to assign to resources" - type = map(string) - default = {} + type = list(string) } variable "kubernetes_version" { @@ -55,19 +51,16 @@ variable "release_channel" { variable "networking_mode" { description = "Determines whether alias IPs or routes will be used for pod IPs in the cluster. Options are VPC_NATIVE or ROUTES." type = string - default = "ROUTES" } variable "network" { description = "Name of the VPC network, where the cluster should be deployed" type = string - default = "default" } variable "subnetwork" { description = "Name of the subnet for deploying cluster into" type = string - default = null } variable "ip_allocation_policy" { @@ -78,7 +71,6 @@ variable "ip_allocation_policy" { cluster_ipv4_cidr_block = string services_ipv4_cidr_block = string })) - default = null } variable "master_authorized_networks_config" { @@ -89,7 +81,6 @@ variable "master_authorized_networks_config" { display_name = string })) })) - default = null } variable "private_cluster_config" { @@ -99,5 +90,4 @@ variable "private_cluster_config" { enable_private_endpoint = bool master_ipv4_cidr_block = string })) - default = null } diff --git a/src/_nebari/template/stages/02-infrastructure/gcp/versions.tf b/src/_nebari/stages/infrastructure/template/gcp/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/gcp/versions.tf rename to src/_nebari/stages/infrastructure/template/gcp/versions.tf diff --git a/src/_nebari/template/stages/02-infrastructure/local/main.tf b/src/_nebari/stages/infrastructure/template/local/main.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/local/main.tf rename to src/_nebari/stages/infrastructure/template/local/main.tf diff --git a/src/_nebari/template/stages/02-infrastructure/local/metallb.yaml b/src/_nebari/stages/infrastructure/template/local/metallb.yaml similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/local/metallb.yaml rename to src/_nebari/stages/infrastructure/template/local/metallb.yaml diff --git a/src/_nebari/template/stages/02-infrastructure/local/outputs.tf b/src/_nebari/stages/infrastructure/template/local/outputs.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/local/outputs.tf rename to src/_nebari/stages/infrastructure/template/local/outputs.tf diff --git a/src/_nebari/template/stages/02-infrastructure/local/variables.tf b/src/_nebari/stages/infrastructure/template/local/variables.tf similarity index 92% rename from src/_nebari/template/stages/02-infrastructure/local/variables.tf rename to src/_nebari/stages/infrastructure/template/local/variables.tf index 097bb1959..246632e92 100644 --- a/src/_nebari/template/stages/02-infrastructure/local/variables.tf +++ b/src/_nebari/stages/infrastructure/template/local/variables.tf @@ -1,7 +1,6 @@ variable "kubeconfig_filename" { description = "Kubernetes kubeconfig written to filesystem" type = string - default = null } variable "kube_context" { diff --git a/src/_nebari/stages/input_vars.py b/src/_nebari/stages/input_vars.py deleted file mode 100644 index 889a81ec8..000000000 --- a/src/_nebari/stages/input_vars.py +++ /dev/null @@ -1,403 +0,0 @@ -import json -import tempfile -from pathlib import Path -from urllib.parse import urlencode - -from _nebari.constants import ( - DEFAULT_CONDA_STORE_IMAGE_TAG, - DEFAULT_GKE_RELEASE_CHANNEL, - DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG, 
- DEFAULT_TRAEFIK_IMAGE_TAG, -) - - -def stage_01_terraform_state(stage_outputs, config): - if config["provider"] == "do": - return { - "name": config["project_name"], - "namespace": config["namespace"], - "region": config["digital_ocean"]["region"], - } - elif config["provider"] == "gcp": - return { - "name": config["project_name"], - "namespace": config["namespace"], - "region": config["google_cloud_platform"]["region"], - } - elif config["provider"] == "aws": - return { - "name": config["project_name"], - "namespace": config["namespace"], - } - elif config["provider"] == "azure": - return { - "name": config["project_name"], - "namespace": config["namespace"], - "region": config["azure"]["region"], - "storage_account_postfix": config["azure"]["storage_account_postfix"], - "state_resource_group_name": f'{config["project_name"]}-{config["namespace"]}-state', - } - else: - return {} - - -def stage_02_infrastructure(stage_outputs, config): - if config["provider"] == "local": - return { - "kubeconfig_filename": str( - Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG" - ), - "kube_context": config["local"].get("kube_context"), - } - elif config["provider"] == "existing": - return {"kube_context": config["existing"].get("kube_context")} - elif config["provider"] == "do": - return { - "name": config["project_name"], - "environment": config["namespace"], - "region": config["digital_ocean"]["region"], - "kubernetes_version": config["digital_ocean"]["kubernetes_version"], - "node_groups": config["digital_ocean"]["node_groups"], - "kubeconfig_filename": str( - Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG" - ), - **config.get("do", {}).get("terraform_overrides", {}), - } - elif config["provider"] == "gcp": - return { - "name": config["project_name"], - "environment": config["namespace"], - "region": config["google_cloud_platform"]["region"], - "project_id": config["google_cloud_platform"]["project"], - "kubernetes_version": config["google_cloud_platform"]["kubernetes_version"], - "release_channel": config.get("google_cloud_platform", {}).get( - "release_channel", DEFAULT_GKE_RELEASE_CHANNEL - ), - "node_groups": [ - { - "name": key, - "instance_type": value["instance"], - "min_size": value["min_nodes"], - "max_size": value["max_nodes"], - "guest_accelerators": value["guest_accelerators"] - if "guest_accelerators" in value - else [], - **value, - } - for key, value in config["google_cloud_platform"]["node_groups"].items() - ], - "kubeconfig_filename": str( - Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG" - ), - **config.get("gcp", {}).get("terraform_overrides", {}), - } - elif config["provider"] == "azure": - return { - "name": config["project_name"], - "environment": config["namespace"], - "region": config["azure"]["region"], - "kubernetes_version": config["azure"]["kubernetes_version"], - "node_groups": config["azure"]["node_groups"], - "kubeconfig_filename": str( - Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG" - ), - "resource_group_name": f'{config["project_name"]}-{config["namespace"]}', - "node_resource_group_name": f'{config["project_name"]}-{config["namespace"]}-node-resource-group', - **config.get("azure", {}).get("terraform_overrides", {}), - } - elif config["provider"] == "aws": - return { - "name": config["project_name"], - "environment": config["namespace"], - "region": config["amazon_web_services"]["region"], - "kubernetes_version": config["amazon_web_services"]["kubernetes_version"], - "node_groups": [ - { - "name": key, - "min_size": value["min_nodes"], - "desired_size": 
max(value["min_nodes"], 1), - "max_size": value["max_nodes"], - "gpu": value.get("gpu", False), - "instance_type": value["instance"], - "single_subnet": value.get("single_subnet", False), - } - for key, value in config["amazon_web_services"]["node_groups"].items() - ], - "kubeconfig_filename": str( - Path(tempfile.gettempdir()) / "NEBARI_KUBECONFIG" - ), - **config.get("amazon_web_services", {}).get("terraform_overrides", {}), - } - else: - return {} - - -def stage_03_kubernetes_initialize(stage_outputs, config): - if config["provider"] == "gcp": - gpu_enabled = any( - node_group.get("guest_accelerators") - for node_group in config["google_cloud_platform"]["node_groups"].values() - ) - gpu_node_group_names = [] - - elif config["provider"] == "aws": - gpu_enabled = any( - node_group.get("gpu") - for node_group in config["amazon_web_services"]["node_groups"].values() - ) - gpu_node_group_names = [ - group for group in config["amazon_web_services"]["node_groups"].keys() - ] - else: - gpu_enabled = False - gpu_node_group_names = [] - - return { - "name": config["project_name"], - "environment": config["namespace"], - "cloud-provider": config["provider"], - "aws-region": config.get("amazon_web_services", {}).get("region"), - "external_container_reg": config.get( - "external_container_reg", {"enabled": False} - ), - "gpu_enabled": gpu_enabled, - "gpu_node_group_names": gpu_node_group_names, - } - - -def _calculate_node_groups(config): - if config["provider"] == "aws": - return { - group: {"key": "eks.amazonaws.com/nodegroup", "value": group} - for group in ["general", "user", "worker"] - } - elif config["provider"] == "gcp": - return { - group: {"key": "cloud.google.com/gke-nodepool", "value": group} - for group in ["general", "user", "worker"] - } - elif config["provider"] == "azure": - return { - group: {"key": "azure-node-pool", "value": group} - for group in ["general", "user", "worker"] - } - elif config["provider"] == "do": - return { - group: {"key": "doks.digitalocean.com/node-pool", "value": group} - for group in ["general", "user", "worker"] - } - elif config["provider"] == "existing": - return config["existing"].get("node_selectors") - else: - return config["local"]["node_selectors"] - - -def stage_04_kubernetes_ingress(stage_outputs, config): - cert_type = config["certificate"]["type"] - cert_details = {"certificate-service": cert_type} - if cert_type == "lets-encrypt": - cert_details["acme-email"] = config["certificate"]["acme_email"] - cert_details["acme-server"] = config["certificate"]["acme_server"] - elif cert_type == "existing": - cert_details["certificate-secret-name"] = config["certificate"]["secret_name"] - - return { - **{ - "traefik-image": { - "image": "traefik", - "tag": DEFAULT_TRAEFIK_IMAGE_TAG, - }, - "name": config["project_name"], - "environment": config["namespace"], - "node_groups": _calculate_node_groups(config), - **config.get("ingress", {}).get("terraform_overrides", {}), - }, - **cert_details, - } - - -def stage_05_kubernetes_keycloak(stage_outputs, config): - initial_root_password = ( - config["security"].get("keycloak", {}).get("initial_root_password", "") - ) - if initial_root_password is None: - initial_root_password = "" - - return { - "name": config["project_name"], - "environment": config["namespace"], - "endpoint": config["domain"], - "initial-root-password": initial_root_password, - "overrides": [ - json.dumps(config["security"].get("keycloak", {}).get("overrides", {})) - ], - "node-group": _calculate_node_groups(config)["general"], - } - - -def 
stage_06_kubernetes_keycloak_configuration(stage_outputs, config): - realm_id = "nebari" - - users_group = ( - ["users"] if config["security"].get("shared_users_group", False) else [] - ) - - return { - "realm": realm_id, - "realm_display_name": config["security"] - .get("keycloak", {}) - .get("realm_display_name", realm_id), - "authentication": config["security"]["authentication"], - "keycloak_groups": ["superadmin", "admin", "developer", "analyst"] - + users_group, - "default_groups": ["analyst"] + users_group, - } - - -def _split_docker_image_name(image_name): - name, tag = image_name.split(":") - return {"name": name, "tag": tag} - - -def stage_07_kubernetes_services(stage_outputs, config): - final_logout_uri = f"https://{config['domain']}/hub/login" - - # Compound any logout URLs from extensions so they are are logged out in succession - # when Keycloak and JupyterHub are logged out - for ext in config.get("tf_extensions", []): - if ext.get("logout", "") != "": - final_logout_uri = "{}?{}".format( - f"https://{config['domain']}/{ext['urlslug']}{ext['logout']}", - urlencode({"redirect_uri": final_logout_uri}), - ) - jupyterhub_theme = config["theme"]["jupyterhub"] - if config["theme"]["jupyterhub"].get("display_version") and ( - not config["theme"]["jupyterhub"].get("version", False) - ): - jupyterhub_theme.update({"version": f"v{config['nebari_version']}"}) - - return { - "name": config["project_name"], - "environment": config["namespace"], - "endpoint": config["domain"], - "realm_id": stage_outputs["stages/06-kubernetes-keycloak-configuration"][ - "realm_id" - ]["value"], - "node_groups": _calculate_node_groups(config), - # conda-store - "conda-store-environments": config["environments"], - "conda-store-filesystem-storage": config["storage"]["conda_store"], - "conda-store-service-token-scopes": { - "cdsdashboards": { - "primary_namespace": "cdsdashboards", - "role_bindings": { - "*/*": ["viewer"], - }, - }, - "dask-gateway": { - "primary_namespace": "", - "role_bindings": { - "*/*": ["viewer"], - }, - }, - "argo-workflows-jupyter-scheduler": { - "primary_namespace": "", - "role_bindings": { - "*/*": ["viewer"], - }, - }, - }, - "conda-store-default-namespace": config.get("conda_store", {}).get( - "default_namespace", "nebari-git" - ), - "conda-store-extra-settings": config.get("conda_store", {}).get( - "extra_settings", {} - ), - "conda-store-extra-config": config.get("conda_store", {}).get( - "extra_config", "" - ), - "conda-store-image-tag": config.get("conda_store", {}).get( - "image_tag", DEFAULT_CONDA_STORE_IMAGE_TAG - ), - # jupyterhub - "cdsdashboards": config["cdsdashboards"], - "jupyterhub-theme": jupyterhub_theme, - "jupyterhub-image": _split_docker_image_name( - config["default_images"]["jupyterhub"] - ), - "jupyterhub-shared-storage": config["storage"]["shared_filesystem"], - "jupyterhub-shared-endpoint": stage_outputs["stages/02-infrastructure"] - .get("nfs_endpoint", {}) - .get("value"), - "jupyterlab-profiles": config["profiles"]["jupyterlab"], - "jupyterlab-image": _split_docker_image_name( - config["default_images"]["jupyterlab"] - ), - "jupyterhub-overrides": [ - json.dumps(config.get("jupyterhub", {}).get("overrides", {})) - ], - "jupyterhub-hub-extraEnv": json.dumps( - config.get("jupyterhub", {}) - .get("overrides", {}) - .get("hub", {}) - .get("extraEnv", []) - ), - # jupyterlab - "idle-culler-settings": config.get("jupyterlab", {}).get("idle_culler", {}), - # dask-gateway - "dask-worker-image": _split_docker_image_name( - config["default_images"]["dask_worker"] 
- ), - "dask-gateway-profiles": config["profiles"]["dask_worker"], - # monitoring - "monitoring-enabled": config["monitoring"]["enabled"], - # argo-worfklows - "argo-workflows-enabled": config["argo_workflows"]["enabled"], - "argo-workflows-overrides": [ - json.dumps(config.get("argo_workflows", {}).get("overrides", {})) - ], - "nebari-workflow-controller": config["argo_workflows"] - .get("nebari_workflow_controller", {}) - .get("enabled", True), - "keycloak-read-only-user-credentials": stage_outputs[ - "stages/06-kubernetes-keycloak-configuration" - ]["keycloak-read-only-user-credentials"]["value"], - "workflow-controller-image-tag": config.get("argo_workflows", {}) - .get("nebari_workflow_controller", {}) - .get( - "image_tag", - DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG, - ), - # kbatch - "kbatch-enabled": config["kbatch"]["enabled"], - # prefect - "prefect-enabled": config.get("prefect", {}).get("enabled", False), - "prefect-token": config.get("prefect", {}).get("token", ""), - "prefect-image": config.get("prefect", {}).get("image", ""), - "prefect-overrides": config.get("prefect", {}).get("overrides", {}), - # clearml - "clearml-enabled": config.get("clearml", {}).get("enabled", False), - "clearml-enable-forwardauth": config.get("clearml", {}).get( - "enable_forward_auth", False - ), - "clearml-overrides": [ - json.dumps(config.get("clearml", {}).get("overrides", {})) - ], - "jupyterhub-logout-redirect-url": final_logout_uri, - } - - -def stage_08_nebari_tf_extensions(stage_outputs, config): - return { - "environment": config["namespace"], - "endpoint": config["domain"], - "realm_id": stage_outputs["stages/06-kubernetes-keycloak-configuration"][ - "realm_id" - ]["value"], - "tf_extensions": config.get("tf_extensions", []), - "nebari_config_yaml": config, - "keycloak_nebari_bot_password": stage_outputs["stages/05-kubernetes-keycloak"][ - "keycloak_nebari_bot_password" - ]["value"], - "helm_extensions": config.get("helm_extensions", []), - } diff --git a/src/_nebari/stages/kubernetes_ingress/__init__.py b/src/_nebari/stages/kubernetes_ingress/__init__.py new file mode 100644 index 000000000..28e5679c6 --- /dev/null +++ b/src/_nebari/stages/kubernetes_ingress/__init__.py @@ -0,0 +1,303 @@ +import enum +import logging +import socket +import sys +import time +import typing +from typing import Any, Dict, List + +from _nebari import constants +from _nebari.provider.dns.cloudflare import update_record +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import ( + NebariHelmProvider, + NebariKubernetesProvider, + NebariTerraformState, +) +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + +logger = logging.getLogger(__name__) + +# check and retry settings +NUM_ATTEMPTS = 10 +TIMEOUT = 10 # seconds + + +def add_clearml_dns(zone_name, record_name, record_type, ip_or_hostname): + dns_records = [ + f"app.clearml.{record_name}", + f"api.clearml.{record_name}", + f"files.clearml.{record_name}", + ] + + for dns_record in dns_records: + update_record(zone_name, dns_record, record_type, ip_or_hostname) + + +def provision_ingress_dns( + stage_outputs, + config, + dns_provider: str, + dns_auto_provision: bool, + disable_prompt: bool = True, + disable_checks: bool = False, +): + directory = "stages/04-kubernetes-ingress" + + ip_or_name = stage_outputs[directory]["load_balancer_address"]["value"] + ip_or_hostname = ip_or_name["hostname"] or ip_or_name["ip"] + + if dns_auto_provision and dns_provider == "cloudflare": + record_name, zone_name 
= ( + config.domain.split(".")[:-2], + config.domain.split(".")[-2:], + ) + record_name = ".".join(record_name) + zone_name = ".".join(zone_name) + if config.provider in { + schema.ProviderEnum.do, + schema.ProviderEnum.gcp, + schema.ProviderEnum.azure, + }: + update_record(zone_name, record_name, "A", ip_or_hostname) + if config.clearml.enabled: + add_clearml_dns(zone_name, record_name, "A", ip_or_hostname) + + elif config.provider == schema.ProviderEnum.aws: + update_record(zone_name, record_name, "CNAME", ip_or_hostname) + if config.clearml.enabled: + add_clearml_dns(zone_name, record_name, "CNAME", ip_or_hostname) + else: + logger.info( + f"Couldn't update the DNS record for cloud provider: {config.provider}" + ) + elif not disable_prompt: + input( + f"Take IP Address {ip_or_hostname} and update DNS to point to " + f'"{config.domain}" [Press Enter when Complete]' + ) + + if not disable_checks: + check_ingress_dns(stage_outputs, config, disable_prompt) + + +def check_ingress_dns(stage_outputs, config, disable_prompt): + directory = "stages/04-kubernetes-ingress" + + ip_or_name = stage_outputs[directory]["load_balancer_address"]["value"] + ip = socket.gethostbyname(ip_or_name["hostname"] or ip_or_name["ip"]) + domain_name = stage_outputs[directory]["domain"] + + def _attempt_dns_lookup( + domain_name, ip, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT + ): + for i in range(num_attempts): + try: + resolved_ip = socket.gethostbyname(domain_name) + if resolved_ip == ip: + print( + f"DNS configured domain={domain_name} matches ingress ip={ip}" + ) + return True + else: + print( + f"Attempt {i+1} polling DNS domain={domain_name} does not match ip={ip} instead got {resolved_ip}" + ) + except socket.gaierror: + print( + f"Attempt {i+1} polling DNS domain={domain_name} record does not exist" + ) + time.sleep(timeout) + return False + + attempt = 0 + while not _attempt_dns_lookup(domain_name, ip): + sleeptime = 60 * (2**attempt) + if not disable_prompt: + input( + f"After attempting to poll the DNS, the record for domain={domain_name} appears not to exist, " + f"has recently been updated, or has yet to fully propagate. This non-deterministic behavior is likely due to " + f"DNS caching and will likely resolve itself in a few minutes.\n\n\tTo poll the DNS again in {sleeptime} seconds " + f"[Press Enter].\n\n...otherwise kill the process and run the deployment again later..." 
+ ) + + print(f"Will attempt to poll DNS again in {sleeptime} seconds...") + time.sleep(sleeptime) + attempt += 1 + if attempt == 5: + print( + f"ERROR: After stage directory={directory} DNS domain={domain_name} does not point to ip={ip}" + ) + sys.exit(1) + + +@schema.yaml_object(schema.yaml) +class CertificateEnum(str, enum.Enum): + letsencrypt = "lets-encrypt" + selfsigned = "self-signed" + existing = "existing" + disabled = "disabled" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class Certificate(schema.Base): + type: CertificateEnum = CertificateEnum.selfsigned + # existing + secret_name: typing.Optional[str] + # lets-encrypt + acme_email: typing.Optional[str] + acme_server: str = "https://acme-v02.api.letsencrypt.org/directory" + + +class DnsProvider(schema.Base): + provider: typing.Optional[str] + + +class Ingress(schema.Base): + terraform_overrides: typing.Dict = {} + + +class InputSchema(schema.Base): + domain: typing.Optional[str] + certificate: Certificate = Certificate() + ingress: Ingress = Ingress() + dns: DnsProvider = DnsProvider() + + +class IngressEndpoint(schema.Base): + ip: str + hostname: str + + +class OutputSchema(schema.Base): + load_balancer_address: typing.List[IngressEndpoint] + domain: str + + +class KubernetesIngressStage(NebariTerraformStage): + name = "04-kubernetes-ingress" + priority = 40 + + input_schema = InputSchema + output_schema = OutputSchema + + def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + NebariKubernetesProvider(self.config), + NebariHelmProvider(self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + cert_type = self.config.certificate.type + cert_details = {"certificate-service": cert_type} + if cert_type == "lets-encrypt": + cert_details["acme-email"] = self.config.certificate.acme_email + cert_details["acme-server"] = self.config.certificate.acme_server + elif cert_type == "existing": + cert_details[ + "certificate-secret-name" + ] = self.config.certificate.secret_name + + return { + **{ + "traefik-image": { + "image": "traefik", + "tag": constants.DEFAULT_TRAEFIK_IMAGE_TAG, + }, + "name": self.config.project_name, + "environment": self.config.namespace, + "node_groups": stage_outputs["stages/02-infrastructure"][ + "node_selectors" + ], + **self.config.ingress.terraform_overrides, + }, + **cert_details, + } + + def set_outputs( + self, stage_outputs: Dict[str, Dict[str, Any]], outputs: Dict[str, Any] + ): + ip_or_name = outputs["load_balancer_address"]["value"] + host = ip_or_name["hostname"] or ip_or_name["ip"] + host = host.strip("\n") + + if self.config.domain is None: + outputs["domain"] = host + else: + outputs["domain"] = self.config.domain + + super().set_outputs(stage_outputs, outputs) + + def post_deploy(self, stage_outputs: Dict[str, Dict[str, Any]]): + if self.config.dns and self.config.dns.provider: + provision_ingress_dns( + stage_outputs, + self.config, + dns_provider=self.config.dns.provider, + dns_auto_provision=True, + disable_prompt=True, + disable_checks=False, + ) + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + def _attempt_tcp_connect( + host, port, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT + ): + for i in range(num_attempts): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + # normalize hostname to ip address + ip = socket.gethostbyname(host) + s.settimeout(5) + result = s.connect_ex((ip, port)) + if result == 0: + print( + f"Attempt {i+1} 
succeeded to connect to tcp://{ip}:{port}" + ) + return True + print(f"Attempt {i+1} failed to connect to tcp tcp://{ip}:{port}") + except socket.gaierror: + print(f"Attempt {i+1} failed to get IP for {host}...") + finally: + s.close() + + time.sleep(timeout) + + return False + + tcp_ports = { + 80, # http + 443, # https + 8022, # jupyterhub-ssh ssh + 8023, # jupyterhub-ssh sftp + 9080, # minio + 8786, # dask-scheduler + } + ip_or_name = stage_outputs["stages/" + self.name]["load_balancer_address"][ + "value" + ] + host = ip_or_name["hostname"] or ip_or_name["ip"] + host = host.strip("\n") + + for port in tcp_ports: + if not _attempt_tcp_connect(host, port): + print( + f"ERROR: After stage={self.name} unable to connect to ingress host={host} port={port}" + ) + sys.exit(1) + + print( + f"After stage={self.name} kubernetes ingress available on tcp ports={tcp_ports}" + ) + + check_ingress_dns(stage_outputs, self.config, disable_prompt=False) + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [KubernetesIngressStage] diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/locals.tf b/src/_nebari/stages/kubernetes_ingress/template/locals.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/locals.tf rename to src/_nebari/stages/kubernetes_ingress/template/locals.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/main.tf b/src/_nebari/stages/kubernetes_ingress/template/main.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/main.tf rename to src/_nebari/stages/kubernetes_ingress/template/main.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/main.tf b/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/main.tf rename to src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/main.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/outputs.tf b/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/outputs.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/outputs.tf rename to src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/outputs.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/variables.tf b/src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/variables.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/modules/kubernetes/ingress/variables.tf rename to src/_nebari/stages/kubernetes_ingress/template/modules/kubernetes/ingress/variables.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/outputs.tf b/src/_nebari/stages/kubernetes_ingress/template/outputs.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/outputs.tf rename to src/_nebari/stages/kubernetes_ingress/template/outputs.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/variables.tf b/src/_nebari/stages/kubernetes_ingress/template/variables.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/variables.tf rename to src/_nebari/stages/kubernetes_ingress/template/variables.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/versions.tf 
b/src/_nebari/stages/kubernetes_ingress/template/versions.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/versions.tf rename to src/_nebari/stages/kubernetes_ingress/template/versions.tf diff --git a/src/_nebari/stages/kubernetes_initialize/__init__.py b/src/_nebari/stages/kubernetes_initialize/__init__.py new file mode 100644 index 000000000..02f8df6f9 --- /dev/null +++ b/src/_nebari/stages/kubernetes_initialize/__init__.py @@ -0,0 +1,133 @@ +import sys +import typing +from typing import Any, Dict, List, Union + +import pydantic + +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import ( + NebariHelmProvider, + NebariKubernetesProvider, + NebariTerraformState, +) +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + + +class ExtContainerReg(schema.Base): + enabled: bool = False + access_key_id: typing.Optional[str] + secret_access_key: typing.Optional[str] + extcr_account: typing.Optional[str] + extcr_region: typing.Optional[str] + + @pydantic.root_validator + def enabled_must_have_fields(cls, values): + if values["enabled"]: + for fldname in ( + "access_key_id", + "secret_access_key", + "extcr_account", + "extcr_region", + ): + if ( + fldname not in values + or values[fldname] is None + or values[fldname].strip() == "" + ): + raise ValueError( + f"external_container_reg must contain a non-blank {fldname} when enabled is true" + ) + return values + + +class InputVars(schema.Base): + name: str + environment: str + cloud_provider: str + aws_region: Union[str, None] = None + external_container_reg: Union[ExtContainerReg, None] = None + gpu_enabled: bool = False + gpu_node_group_names: List[str] = [] + + +class InputSchema(schema.Base): + external_container_reg: ExtContainerReg = ExtContainerReg() + + +class OutputSchema(schema.Base): + pass + + +class KubernetesInitializeStage(NebariTerraformStage): + name = "03-kubernetes-initialize" + priority = 30 + + input_schema = InputSchema + output_schema = OutputSchema + + def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + NebariKubernetesProvider(self.config), + NebariHelmProvider(self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + input_vars = InputVars( + name=self.config.project_name, + environment=self.config.namespace, + cloud_provider=self.config.provider.value, + external_container_reg=self.config.external_container_reg.dict(), + ) + + if self.config.provider == schema.ProviderEnum.gcp: + input_vars.gpu_enabled = any( + node_group.guest_accelerators + for node_group in self.config.google_cloud_platform.node_groups.values() + ) + + elif self.config.provider == schema.ProviderEnum.aws: + input_vars.gpu_enabled = any( + node_group.gpu + for node_group in self.config.amazon_web_services.node_groups.values() + ) + input_vars.gpu_node_group_names = [ + group for group in self.config.amazon_web_services.node_groups.keys() + ] + input_vars.aws_region = self.config.amazon_web_services.region + + return input_vars.dict() + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + from kubernetes import client, config + from kubernetes.client.rest import ApiException + + config.load_kube_config( + config_file=stage_outputs["stages/02-infrastructure"][ + "kubeconfig_filename" + ]["value"] + ) + + try: + api_instance = client.CoreV1Api() + result = api_instance.list_namespace() + except ApiException: + print( + f"ERROR: After stage={self.name} unable to connect to 
kubernetes cluster" + ) + sys.exit(1) + + namespaces = {_.metadata.name for _ in result.items} + if self.config.namespace not in namespaces: + print( + f"ERROR: After stage={self.name} namespace={self.config.namespace} not provisioned within kubernetes cluster" + ) + sys.exit(1) + + print(f"After stage={self.name} kubernetes initialized successfully") + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [KubernetesInitializeStage] diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/external-container-registry.tf b/src/_nebari/stages/kubernetes_initialize/template/external-container-registry.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/external-container-registry.tf rename to src/_nebari/stages/kubernetes_initialize/template/external-container-registry.tf diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/locals.tf b/src/_nebari/stages/kubernetes_initialize/template/locals.tf similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/locals.tf rename to src/_nebari/stages/kubernetes_initialize/template/locals.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/main.tf b/src/_nebari/stages/kubernetes_initialize/template/main.tf similarity index 81% rename from src/_nebari/template/stages/03-kubernetes-initialize/main.tf rename to src/_nebari/stages/kubernetes_initialize/template/main.tf index 1a0fae924..402c68fb3 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/main.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/main.tf @@ -6,13 +6,13 @@ module "kubernetes-initialization" { } module "kubernetes-autoscaling" { - count = var.cloud-provider == "aws" ? 1 : 0 + count = var.cloud_provider == "aws" ? 1 : 0 source = "./modules/cluster-autoscaler" namespace = var.environment - aws-region = var.aws-region + aws_region = var.aws_region cluster-name = local.cluster_name } @@ -25,7 +25,7 @@ module "nvidia-driver-installer" { source = "./modules/nvidia-installer" - cloud-provider = var.cloud-provider + cloud_provider = var.cloud_provider gpu_enabled = var.gpu_enabled gpu_node_group_names = var.gpu_node_group_names } diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/main.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf similarity index 92% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/main.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf index 377998163..29f982c86 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/main.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/main.tf @@ -13,7 +13,7 @@ resource "helm_release" "autoscaler" { } cloudProvider = "aws" - awsRegion = var.aws-region + awsRegion = var.aws_region autoDiscovery = { clusterName = var.cluster-name diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/variables.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/variables.tf similarity index 94% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/variables.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/variables.tf index 312383f9a..a7169abee 100644 --- 
a/src/_nebari/template/stages/03-kubernetes-initialize/modules/cluster-autoscaler/variables.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/cluster-autoscaler/variables.tf @@ -8,7 +8,7 @@ variable "cluster-name" { type = string } -variable "aws-region" { +variable "aws_region" { description = "AWS Region that cluster autoscaler is running" type = string } diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/extcr/main.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/extcr/main.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/extcr/main.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/extcr/main.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/extcr/variables.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/extcr/variables.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/extcr/variables.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/extcr/variables.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/initialization/main.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/initialization/main.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/initialization/main.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/initialization/main.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/initialization/variables.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/initialization/variables.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/initialization/variables.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/initialization/variables.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/aws-nvidia-installer.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/aws-nvidia-installer.tf similarity index 96% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/aws-nvidia-installer.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/aws-nvidia-installer.tf index f8706f7b6..4b1500ec9 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/aws-nvidia-installer.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/aws-nvidia-installer.tf @@ -1,5 +1,5 @@ resource "kubernetes_daemonset" "aws_nvidia_installer" { - count = var.gpu_enabled && (var.cloud-provider == "aws") ? 1 : 0 + count = var.gpu_enabled && (var.cloud_provider == "aws") ? 
1 : 0 metadata { name = "nvidia-device-plugin-daemonset-1.12" namespace = "kube-system" diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/gcp-nvidia-installer.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/gcp-nvidia-installer.tf similarity index 98% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/gcp-nvidia-installer.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/gcp-nvidia-installer.tf index 544aed0b8..bb73ac38d 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/gcp-nvidia-installer.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/gcp-nvidia-installer.tf @@ -1,6 +1,6 @@ # source https://cloud.google.com/kubernetes-engine/docs/how-to/gpus#installing_drivers resource "kubernetes_daemonset" "gcp_nvidia_installer" { - count = var.gpu_enabled && (var.cloud-provider == "gcp") ? 1 : 0 + count = var.gpu_enabled && (var.cloud_provider == "gcp") ? 1 : 0 metadata { name = "nvidia-driver-installer" diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/variables.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/variables.tf similarity index 75% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/variables.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/variables.tf index 1c3e60f3d..9eb9a9b2a 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/modules/nvidia-installer/variables.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/modules/nvidia-installer/variables.tf @@ -8,7 +8,7 @@ variable "gpu_enabled" { default = false } -variable "cloud-provider" { - description = "Name of cloud-provider" +variable "cloud_provider" { + description = "Name of cloud_provider" type = string } diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/modules/traefik_crds/main.tf b/src/_nebari/stages/kubernetes_initialize/template/modules/traefik_crds/main.tf similarity index 100% rename from src/_nebari/template/stages/03-kubernetes-initialize/modules/traefik_crds/main.tf rename to src/_nebari/stages/kubernetes_initialize/template/modules/traefik_crds/main.tf diff --git a/src/_nebari/template/stages/03-kubernetes-initialize/variables.tf b/src/_nebari/stages/kubernetes_initialize/template/variables.tf similarity index 87% rename from src/_nebari/template/stages/03-kubernetes-initialize/variables.tf rename to src/_nebari/stages/kubernetes_initialize/template/variables.tf index 87eaa9ed0..f169f5bcf 100644 --- a/src/_nebari/template/stages/03-kubernetes-initialize/variables.tf +++ b/src/_nebari/stages/kubernetes_initialize/template/variables.tf @@ -8,12 +8,12 @@ variable "environment" { type = string } -variable "cloud-provider" { +variable "cloud_provider" { description = "Cloud provider being used in deployment" type = string } -variable "aws-region" { +variable "aws_region" { description = "AWS region is cloud provider is AWS" type = string } @@ -25,10 +25,8 @@ variable "external_container_reg" { variable "gpu_enabled" { description = "Enable GPU support" type = bool - default = false } variable "gpu_node_group_names" { description = "Names of node groups with GPU" - default = [] } diff --git a/src/_nebari/template/stages/04-kubernetes-ingress/versions.tf b/src/_nebari/stages/kubernetes_initialize/template/versions.tf 
similarity index 100% rename from src/_nebari/template/stages/04-kubernetes-ingress/versions.tf rename to src/_nebari/stages/kubernetes_initialize/template/versions.tf diff --git a/src/_nebari/stages/kubernetes_keycloak/__init__.py b/src/_nebari/stages/kubernetes_keycloak/__init__.py new file mode 100644 index 000000000..ac8882df2 --- /dev/null +++ b/src/_nebari/stages/kubernetes_keycloak/__init__.py @@ -0,0 +1,265 @@ +import contextlib +import enum +import json +import secrets +import string +import sys +import time +import typing +from abc import ABC +from typing import Any, Dict, List + +import pydantic + +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import ( + NebariHelmProvider, + NebariKubernetesProvider, + NebariTerraformState, +) +from _nebari.utils import modified_environ +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + +NUM_ATTEMPTS = 10 +TIMEOUT = 10 + + +class InputVars(schema.Base): + name: str + environment: str + endpoint: str + initial_root_password: str + overrides: List[str] + node_group: Dict[str, str] + + +@contextlib.contextmanager +def keycloak_provider_context(keycloak_credentials: Dict[str, str]): + credential_mapping = { + "client_id": "KEYCLOAK_CLIENT_ID", + "url": "KEYCLOAK_URL", + "username": "KEYCLOAK_USER", + "password": "KEYCLOAK_PASSWORD", + "realm": "KEYCLOAK_REALM", + } + + credentials = {credential_mapping[k]: v for k, v in keycloak_credentials.items()} + with modified_environ(**credentials): + yield + + +@schema.yaml_object(schema.yaml) +class AuthenticationEnum(str, enum.Enum): + password = "password" + github = "GitHub" + auth0 = "Auth0" + custom = "custom" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class GitHubConfig(schema.Base): + client_id: str + client_secret: str + + +class Auth0Config(schema.Base): + client_id: str + client_secret: str + auth0_subdomain: str + + +class Authentication(schema.Base, ABC): + _types: typing.Dict[str, type] = {} + + type: AuthenticationEnum + + # Based on https://github.com/samuelcolvin/pydantic/issues/2177#issuecomment-739578307 + + # This allows type field to determine which subclass of Authentication should be used for validation. + + # Used to register automatically all the submodels in `_types`. 
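
The subclass-registration comment above is easier to see in isolation. Below is a minimal standalone sketch of the same pattern — not part of this diff, plain pydantic v1, illustrative class names only: each subclass records itself in the shared `_types` registry when it is defined, and `validate()` uses the incoming dict's `"type"` key to pick which registered subclass performs validation.

import enum
import typing

import pydantic  # pydantic v1 API assumed, matching the models in this diff


class AuthenticationEnum(str, enum.Enum):
    password = "password"
    github = "GitHub"


class Authentication(pydantic.BaseModel):
    # shared registry, filled in as each subclass is defined
    _types: typing.ClassVar[typing.Dict[str, type]] = {}

    type: AuthenticationEnum

    def __init_subclass__(cls):
        # called once per subclass; cls._typ is set in the subclass body
        cls._types[cls._typ.value] = cls

    @classmethod
    def __get_validators__(cls):
        yield cls.validate

    @classmethod
    def validate(cls, value: typing.Dict[str, typing.Any]) -> "Authentication":
        # the "type" key selects which registered subclass validates the payload
        return cls._types[value["type"]](**value)


class PasswordAuthentication(Authentication):
    _typ = AuthenticationEnum.password


class GitHubConfig(pydantic.BaseModel):
    client_id: str
    client_secret: str


class GitHubAuthentication(Authentication):
    _typ = AuthenticationEnum.github
    config: GitHubConfig


class Security(pydantic.BaseModel):
    authentication: Authentication


sec = Security(
    authentication={
        "type": "GitHub",
        "config": {"client_id": "abc", "client_secret": "xyz"},
    }
)
assert isinstance(sec.authentication, GitHubAuthentication)
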
+ def __init_subclass__(cls): + cls._types[cls._typ.value] = cls + + @classmethod + def __get_validators__(cls): + yield cls.validate + + @classmethod + def validate(cls, value: typing.Dict[str, typing.Any]) -> "Authentication": + if "type" not in value: + raise ValueError("type field is missing from security.authentication") + + specified_type = value.get("type") + sub_class = cls._types.get(specified_type, None) + + if not sub_class: + raise ValueError( + f"No registered Authentication type called {specified_type}" + ) + + # init with right submodel + return sub_class(**value) + + +def random_secure_string( + length: int = 16, chars: str = string.ascii_lowercase + string.digits +): + return "".join(secrets.choice(chars) for i in range(length)) + + +class PasswordAuthentication(Authentication): + _typ = AuthenticationEnum.password + + +class Auth0Authentication(Authentication): + _typ = AuthenticationEnum.auth0 + config: Auth0Config + + +class GitHubAuthentication(Authentication): + _typ = AuthenticationEnum.github + config: GitHubConfig + + +class Keycloak(schema.Base): + initial_root_password: str = pydantic.Field(default_factory=random_secure_string) + overrides: typing.Dict = {} + realm_display_name: str = "Nebari" + + +class Security(schema.Base): + authentication: Authentication = PasswordAuthentication( + type=AuthenticationEnum.password + ) + shared_users_group: bool = True + keycloak: Keycloak = Keycloak() + + +class InputSchema(schema.Base): + security: Security = Security() + + +class KeycloakCredentials(schema.Base): + url: str + client_id: str + realm: str + username: str + password: str + + +class OutputSchema(schema.Base): + keycloak_credentials: KeycloakCredentials + keycloak_nebari_bot_password: str + + +class KubernetesKeycloakStage(NebariTerraformStage): + name = "05-kubernetes-keycloak" + priority = 50 + + input_schema = InputSchema + output_schema = OutputSchema + + def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + NebariKubernetesProvider(self.config), + NebariHelmProvider(self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + return InputVars( + name=self.config.project_name, + environment=self.config.namespace, + endpoint=stage_outputs["stages/04-kubernetes-ingress"]["domain"], + initial_root_password=self.config.security.keycloak.initial_root_password, + overrides=[json.dumps(self.config.security.keycloak.overrides)], + node_group=stage_outputs["stages/02-infrastructure"]["node_selectors"][ + "general" + ], + ).dict() + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + from keycloak import KeycloakAdmin + from keycloak.exceptions import KeycloakError + + keycloak_url = f"{stage_outputs['stages/' + self.name]['keycloak_credentials']['value']['url']}/auth/" + + def _attempt_keycloak_connection( + keycloak_url, + username, + password, + realm_name, + client_id, + verify=False, + num_attempts=NUM_ATTEMPTS, + timeout=TIMEOUT, + ): + for i in range(num_attempts): + try: + KeycloakAdmin( + keycloak_url, + username=username, + password=password, + realm_name=realm_name, + client_id=client_id, + verify=verify, + ) + print( + f"Attempt {i+1} succeeded connecting to keycloak master realm" + ) + return True + except KeycloakError: + print(f"Attempt {i+1} failed connecting to keycloak master realm") + time.sleep(timeout) + return False + + if not _attempt_keycloak_connection( + keycloak_url, + stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"][ + "username" + ], + 
stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"][ + "password" + ], + stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"][ + "realm" + ], + stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"][ + "client_id" + ], + verify=False, + ): + print( + f"ERROR: unable to connect to keycloak master realm at url={keycloak_url} with root credentials" + ) + sys.exit(1) + + print("Keycloak service successfully started") + + @contextlib.contextmanager + def deploy(self, stage_outputs: Dict[str, Dict[str, Any]]): + with super().deploy(stage_outputs): + with keycloak_provider_context( + stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"] + ): + yield + + @contextlib.contextmanager + def destroy( + self, stage_outputs: Dict[str, Dict[str, Any]], status: Dict[str, bool] + ): + with super().destroy(stage_outputs, status): + with keycloak_provider_context( + stage_outputs["stages/" + self.name]["keycloak_credentials"]["value"] + ): + yield + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [KubernetesKeycloakStage] diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/main.tf b/src/_nebari/stages/kubernetes_keycloak/template/main.tf similarity index 81% rename from src/_nebari/template/stages/05-kubernetes-keycloak/main.tf rename to src/_nebari/stages/kubernetes_keycloak/template/main.tf index 51a49ae2f..d22b4ec84 100644 --- a/src/_nebari/template/stages/05-kubernetes-keycloak/main.tf +++ b/src/_nebari/stages/kubernetes_keycloak/template/main.tf @@ -12,9 +12,9 @@ module "kubernetes-keycloak-helm" { nebari-bot-password = random_password.keycloak-nebari-bot-password.result - initial-root-password = var.initial-root-password + initial_root_password = var.initial_root_password overrides = var.overrides - node-group = var.node-group + node_group = var.node_group } diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/main.tf b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/main.tf similarity index 89% rename from src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/main.tf rename to src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/main.tf index d9e804ee9..7e02ea102 100644 --- a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/main.tf +++ b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/main.tf @@ -11,12 +11,12 @@ resource "helm_release" "keycloak" { file("${path.module}/values.yaml"), jsonencode({ nodeSelector = { - "${var.node-group.key}" = var.node-group.value + "${var.node_group.key}" = var.node_group.value } postgresql = { primary = { nodeSelector = { - "${var.node-group.key}" = var.node-group.value + "${var.node_group.key}" = var.node_group.value } } } @@ -30,7 +30,7 @@ resource "helm_release" "keycloak" { set { name = "initial_root_password" - value = var.initial-root-password + value = var.initial_root_password } } diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/outputs.tf b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/outputs.tf similarity index 100% rename from src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/outputs.tf rename to src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/outputs.tf diff --git 
a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/values.yaml b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml similarity index 100% rename from src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/values.yaml rename to src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/values.yaml diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/variables.tf b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/variables.tf similarity index 91% rename from src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/variables.tf rename to src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/variables.tf index 1c3648553..90392b1e9 100644 --- a/src/_nebari/template/stages/05-kubernetes-keycloak/modules/kubernetes/keycloak-helm/variables.tf +++ b/src/_nebari/stages/kubernetes_keycloak/template/modules/kubernetes/keycloak-helm/variables.tf @@ -19,12 +19,12 @@ variable "nebari-bot-password" { type = string } -variable "initial-root-password" { +variable "initial_root_password" { description = "initial root password for keycloak" type = string } -variable "node-group" { +variable "node_group" { description = "Node key value pair for bound general resources" type = object({ key = string diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/outputs.tf b/src/_nebari/stages/kubernetes_keycloak/template/outputs.tf similarity index 100% rename from src/_nebari/template/stages/05-kubernetes-keycloak/outputs.tf rename to src/_nebari/stages/kubernetes_keycloak/template/outputs.tf diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/variables.tf b/src/_nebari/stages/kubernetes_keycloak/template/variables.tf similarity index 90% rename from src/_nebari/template/stages/05-kubernetes-keycloak/variables.tf rename to src/_nebari/stages/kubernetes_keycloak/template/variables.tf index 6dd2241b8..589b0cca0 100644 --- a/src/_nebari/template/stages/05-kubernetes-keycloak/variables.tf +++ b/src/_nebari/stages/kubernetes_keycloak/template/variables.tf @@ -13,7 +13,7 @@ variable "endpoint" { type = string } -variable "initial-root-password" { +variable "initial_root_password" { description = "Keycloak root user password" type = string } @@ -22,10 +22,9 @@ variable "overrides" { # https://github.com/codecentric/helm-charts/blob/master/charts/keycloak/values.yaml description = "Keycloak helm chart overrides" type = list(string) - default = [] } -variable "node-group" { +variable "node_group" { description = "Node key value pair for bound general resources" type = object({ key = string diff --git a/src/_nebari/template/stages/05-kubernetes-keycloak/versions.tf b/src/_nebari/stages/kubernetes_keycloak/template/versions.tf similarity index 100% rename from src/_nebari/template/stages/05-kubernetes-keycloak/versions.tf rename to src/_nebari/stages/kubernetes_keycloak/template/versions.tf diff --git a/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py b/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py new file mode 100644 index 000000000..39ca07a59 --- /dev/null +++ b/src/_nebari/stages/kubernetes_keycloak_configuration/__init__.py @@ -0,0 +1,111 @@ +import sys +import time +from typing import Any, Dict, List + +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import NebariTerraformState 
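
For context on `keycloak_provider_context` above: it relies on `modified_environ` to expose the Keycloak credentials as `KEYCLOAK_*` environment variables only for the duration of the wrapped deploy/destroy. The following is a minimal sketch of such a helper, not the actual `_nebari.utils` implementation (which may handle more cases, e.g. removing variables):

import contextlib
import os


@contextlib.contextmanager
def modified_environ(**updates):
    """Temporarily set environment variables, restoring the originals on exit."""
    saved = {key: os.environ.get(key) for key in updates}
    os.environ.update(updates)
    try:
        yield
    finally:
        for key, previous in saved.items():
            if previous is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = previous


# Credentials are visible only to code running inside the block,
# e.g. a Terraform provider reading KEYCLOAK_* variables.
with modified_environ(KEYCLOAK_USER="nebari-bot", KEYCLOAK_PASSWORD="not-a-real-secret"):
    assert os.environ["KEYCLOAK_USER"] == "nebari-bot"
assert "KEYCLOAK_USER" not in os.environ  # assuming it was unset beforehand
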
+from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + +NUM_ATTEMPTS = 10 +TIMEOUT = 10 + + +class InputVars(schema.Base): + realm: str = "nebari" + realm_display_name: str + authentication: Dict[str, Any] + keycloak_groups: List[str] = ["superadmin", "admin", "developer", "analyst"] + default_groups: List[str] = ["analyst"] + + +class KubernetesKeycloakConfigurationStage(NebariTerraformStage): + name = "06-kubernetes-keycloak-configuration" + priority = 60 + + def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + input_vars = InputVars( + realm_display_name=self.config.security.keycloak.realm_display_name, + authentication=self.config.security.authentication, + ) + + users_group = ["users"] if self.config.security.shared_users_group else [] + + input_vars.keycloak_groups += users_group + input_vars.default_groups += users_group + + return input_vars.dict() + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + directory = "stages/05-kubernetes-keycloak" + + from keycloak import KeycloakAdmin + from keycloak.exceptions import KeycloakError + + keycloak_url = ( + f"{stage_outputs[directory]['keycloak_credentials']['value']['url']}/auth/" + ) + + def _attempt_keycloak_connection( + keycloak_url, + username, + password, + realm_name, + client_id, + nebari_realm, + verify=False, + num_attempts=NUM_ATTEMPTS, + timeout=TIMEOUT, + ): + for i in range(num_attempts): + try: + realm_admin = KeycloakAdmin( + keycloak_url, + username=username, + password=password, + realm_name=realm_name, + client_id=client_id, + verify=verify, + ) + existing_realms = {_["id"] for _ in realm_admin.get_realms()} + if nebari_realm in existing_realms: + print( + f"Attempt {i+1} succeeded connecting to keycloak and nebari realm={nebari_realm} exists" + ) + return True + else: + print( + f"Attempt {i+1} succeeded connecting to keycloak but nebari realm did not exist" + ) + except KeycloakError: + print(f"Attempt {i+1} failed connecting to keycloak master realm") + time.sleep(timeout) + return False + + if not _attempt_keycloak_connection( + keycloak_url, + stage_outputs[directory]["keycloak_credentials"]["value"]["username"], + stage_outputs[directory]["keycloak_credentials"]["value"]["password"], + stage_outputs[directory]["keycloak_credentials"]["value"]["realm"], + stage_outputs[directory]["keycloak_credentials"]["value"]["client_id"], + nebari_realm=stage_outputs["stages/06-kubernetes-keycloak-configuration"][ + "realm_id" + ]["value"], + verify=False, + ): + print( + "ERROR: unable to connect to keycloak master realm and ensure that nebari realm exists" + ) + sys.exit(1) + + print("Keycloak service successfully started with nebari realm") + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [KubernetesKeycloakConfigurationStage] diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/main.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/main.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/main.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/main.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/outputs.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/outputs.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/outputs.tf rename to 
src/_nebari/stages/kubernetes_keycloak_configuration/template/outputs.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/permissions.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/permissions.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/permissions.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/permissions.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/providers.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/providers.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/providers.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/providers.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/social_auth.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/social_auth.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/social_auth.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/social_auth.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/variables.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/variables.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/variables.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/variables.tf diff --git a/src/_nebari/template/stages/06-kubernetes-keycloak-configuration/versions.tf b/src/_nebari/stages/kubernetes_keycloak_configuration/template/versions.tf similarity index 100% rename from src/_nebari/template/stages/06-kubernetes-keycloak-configuration/versions.tf rename to src/_nebari/stages/kubernetes_keycloak_configuration/template/versions.tf diff --git a/src/_nebari/stages/kubernetes_services/__init__.py b/src/_nebari/stages/kubernetes_services/__init__.py new file mode 100644 index 000000000..087bac464 --- /dev/null +++ b/src/_nebari/stages/kubernetes_services/__init__.py @@ -0,0 +1,611 @@ +import enum +import json +import os +import sys +import time +import typing +from typing import Any, Dict, List +from urllib.parse import urlencode + +import pydantic +from pydantic import Field + +from _nebari import constants +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import ( + NebariHelmProvider, + NebariKubernetesProvider, + NebariTerraformState, +) +from _nebari.version import __version__ +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + +# check and retry settings +NUM_ATTEMPTS = 10 +TIMEOUT = 10 # seconds + + +def set_docker_image_tag() -> str: + """Set docker image tag for `jupyterlab`, `jupyterhub`, and `dask-worker`.""" + return os.environ.get("NEBARI_IMAGE_TAG", constants.DEFAULT_NEBARI_IMAGE_TAG) + + +def set_nebari_dask_version() -> str: + """Set version of `nebari-dask` meta package.""" + return os.environ.get("NEBARI_DASK_VERSION", constants.DEFAULT_NEBARI_DASK_VERSION) + + +@schema.yaml_object(schema.yaml) +class AccessEnum(str, enum.Enum): + all = "all" + yaml = "yaml" + keycloak = "keycloak" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class Prefect(schema.Base): + enabled: bool = False + image: typing.Optional[str] + overrides: typing.Dict = {} + token: typing.Optional[str] + + +class CDSDashboards(schema.Base): 
+ enabled: bool = True + cds_hide_user_named_servers: bool = True + cds_hide_user_dashboard_servers: bool = False + + +class DefaultImages(schema.Base): + jupyterhub: str = f"quay.io/nebari/nebari-jupyterhub:{set_docker_image_tag()}" + jupyterlab: str = f"quay.io/nebari/nebari-jupyterlab:{set_docker_image_tag()}" + dask_worker: str = f"quay.io/nebari/nebari-dask-worker:{set_docker_image_tag()}" + + +class Storage(schema.Base): + conda_store: str = "200Gi" + shared_filesystem: str = "200Gi" + + +class JupyterHubTheme(schema.Base): + hub_title: str = "Nebari" + hub_subtitle: str = "Your open source data science platform" + welcome: str = """Welcome! Learn about Nebari's features and configurations in the documentation. If you have any questions or feedback, reach the team on Nebari's support forums.""" + logo: str = "https://raw.githubusercontent.com/nebari-dev/nebari-design/main/logo-mark/horizontal/Nebari-Logo-Horizontal-Lockup-White-text.svg" + primary_color: str = "#4f4173" + secondary_color: str = "#957da6" + accent_color: str = "#32C574" + text_color: str = "#111111" + h1_color: str = "#652e8e" + h2_color: str = "#652e8e" + version: str = f"v{__version__}" + display_version: str = "True" # limitation of theme everything is a str + + +class Theme(schema.Base): + jupyterhub: JupyterHubTheme = JupyterHubTheme() + + +class KubeSpawner(schema.Base): + cpu_limit: int + cpu_guarantee: int + mem_limit: str + mem_guarantee: str + + class Config: + extra = "allow" + + +class JupyterLabProfile(schema.Base): + access: AccessEnum = AccessEnum.all + display_name: str + description: str + default: bool = False + users: typing.Optional[typing.List[str]] + groups: typing.Optional[typing.List[str]] + kubespawner_override: typing.Optional[KubeSpawner] + + @pydantic.root_validator + def only_yaml_can_have_groups_and_users(cls, values): + if values["access"] != AccessEnum.yaml: + if ( + values.get("users", None) is not None + or values.get("groups", None) is not None + ): + raise ValueError( + "Profile must not contain groups or users fields unless access = yaml" + ) + return values + + +class DaskWorkerProfile(schema.Base): + worker_cores_limit: int + worker_cores: int + worker_memory_limit: str + worker_memory: str + worker_threads: int = 1 + image: str = f"quay.io/nebari/nebari-dask-worker:{set_docker_image_tag()}" + + class Config: + extra = "allow" + + +class Profiles(schema.Base): + jupyterlab: typing.List[JupyterLabProfile] = [ + JupyterLabProfile( + display_name="Small Instance", + description="Stable environment with 2 cpu / 8 GB ram", + default=True, + kubespawner_override=KubeSpawner( + cpu_limit=2, + cpu_guarantee=1.5, + mem_limit="8G", + mem_guarantee="5G", + ), + ), + JupyterLabProfile( + display_name="Medium Instance", + description="Stable environment with 4 cpu / 16 GB ram", + kubespawner_override=KubeSpawner( + cpu_limit=4, + cpu_guarantee=3, + mem_limit="16G", + mem_guarantee="10G", + ), + ), + ] + dask_worker: typing.Dict[str, DaskWorkerProfile] = { + "Small Worker": DaskWorkerProfile( + worker_cores_limit=2, + worker_cores=1.5, + worker_memory_limit="8G", + worker_memory="5G", + worker_threads=2, + ), + "Medium Worker": DaskWorkerProfile( + worker_cores_limit=4, + worker_cores=3, + worker_memory_limit="16G", + worker_memory="10G", + worker_threads=4, + ), + } + + @pydantic.validator("jupyterlab") + def check_default(cls, v, values): + """Check if only one default value is present.""" + default = [attrs["default"] for attrs in v if "default" in attrs] + if default.count(True) > 1: + 
raise TypeError( + "Multiple default Jupyterlab profiles may cause unexpected problems." + ) + return v + + +class CondaEnvironment(schema.Base): + name: str + channels: typing.Optional[typing.List[str]] + dependencies: typing.List[typing.Union[str, typing.Dict[str, typing.List[str]]]] + + +class CondaStore(schema.Base): + extra_settings: typing.Dict[str, typing.Any] = {} + extra_config: str = "" + image: str = "quansight/conda-store-server" + image_tag: str = constants.DEFAULT_CONDA_STORE_IMAGE_TAG + default_namespace: str = "nebari-git" + object_storage: str = "200Gi" + + +class NebariWorkflowController(schema.Base): + enabled: bool = True + image_tag: str = constants.DEFAULT_NEBARI_WORKFLOW_CONTROLLER_IMAGE_TAG + + +class ArgoWorkflows(schema.Base): + enabled: bool = True + overrides: typing.Dict = {} + nebari_workflow_controller: NebariWorkflowController = NebariWorkflowController() + + +class KBatch(schema.Base): + enabled: bool = True + + +class Monitoring(schema.Base): + enabled: bool = True + + +class ClearML(schema.Base): + enabled: bool = False + enable_forward_auth: bool = False + overrides: typing.Dict = {} + + +class JupyterHub(schema.Base): + overrides: typing.Dict = {} + + +class IdleCuller(schema.Base): + terminal_cull_inactive_timeout: int = 15 + terminal_cull_interval: int = 5 + kernel_cull_idle_timeout: int = 15 + kernel_cull_interval: int = 5 + kernel_cull_connected: bool = True + kernel_cull_busy: bool = False + server_shutdown_no_activity_timeout: int = 15 + + +class JupyterLab(schema.Base): + idle_culler: IdleCuller = IdleCuller() + + +class InputSchema(schema.Base): + prefect: Prefect = Prefect() + cdsdashboards: CDSDashboards = CDSDashboards() + default_images: DefaultImages = DefaultImages() + storage: Storage = Storage() + theme: Theme = Theme() + profiles: Profiles = Profiles() + environments: typing.Dict[str, CondaEnvironment] = { + "environment-dask.yaml": CondaEnvironment( + name="dask", + channels=["conda-forge"], + dependencies=[ + "python=3.10.8", + "ipykernel=6.21.0", + "ipywidgets==7.7.1", + f"nebari-dask =={set_nebari_dask_version()}", + "python-graphviz=0.20.1", + "pyarrow=10.0.1", + "s3fs=2023.1.0", + "gcsfs=2023.1.0", + "numpy=1.23.5", + "numba=0.56.4", + "pandas=1.5.3", + { + "pip": [ + "kbatch==0.4.1", + ], + }, + ], + ), + "environment-dashboard.yaml": CondaEnvironment( + name="dashboard", + channels=["conda-forge"], + dependencies=[ + "python=3.10", + "cdsdashboards-singleuser=0.6.3", + "cufflinks-py=0.17.3", + "dash=2.8.1", + "geopandas=0.12.2", + "geopy=2.3.0", + "geoviews=1.9.6", + "gunicorn=20.1.0", + "holoviews=1.15.4", + "ipykernel=6.21.2", + "ipywidgets=8.0.4", + "jupyter=1.0.0", + "jupyterlab=3.6.1", + "jupyter_bokeh=3.0.5", + "matplotlib=3.7.0", + f"nebari-dask=={set_nebari_dask_version()}", + "nodejs=18.12.1", + "numpy", + "openpyxl=3.1.1", + "pandas=1.5.3", + "panel=0.14.3", + "param=1.12.3", + "plotly=5.13.0", + "python-graphviz=0.20.1", + "rich=13.3.1", + "streamlit=1.9.0", + "sympy=1.11.1", + "voila=0.4.0", + "pip=23.0", + { + "pip": [ + "streamlit-image-comparison==0.0.3", + "noaa-coops==0.2.1", + "dash_core_components==2.0.0", + "dash_html_components==2.0.0", + ], + }, + ], + ), + } + conda_store: CondaStore = CondaStore() + argo_workflows: ArgoWorkflows = ArgoWorkflows() + kbatch: KBatch = KBatch() + monitoring: Monitoring = Monitoring() + clearml: ClearML = ClearML() + jupyterhub: JupyterHub = JupyterHub() + jupyterlab: JupyterLab = JupyterLab() + + +class OutputSchema(schema.Base): + pass + + +# variables shared by multiple 
services +class KubernetesServicesInputVars(schema.Base): + name: str + environment: str + endpoint: str + realm_id: str + node_groups: Dict[str, Dict[str, str]] + jupyterhub_logout_redirect_url: str = Field(alias="jupyterhub-logout-redirect-url") + + +def _split_docker_image_name(image_name): + name, tag = image_name.split(":") + return {"name": name, "tag": tag} + + +class ImageNameTag(schema.Base): + name: str + tag: str + + +class CondaStoreInputVars(schema.Base): + conda_store_environments: Dict[str, CondaEnvironment] = Field( + alias="conda-store-environments" + ) + conda_store_default_namespace: str = Field(alias="conda-store-default-namespace") + conda_store_filesystem_storage: str = Field(alias="conda-store-filesystem-storage") + conda_store_object_storage: str = Field(alias="conda-store-object-storage") + conda_store_extra_settings: Dict[str, Any] = Field( + alias="conda-store-extra-settings" + ) + conda_store_extra_config: str = Field(alias="conda-store-extra-config") + conda_store_image: str = Field(alias="conda-store-image") + conda_store_image_tag: str = Field(alias="conda-store-image-tag") + conda_store_service_token_scopes: Dict[str, Dict[str, Any]] = Field( + alias="conda-store-service-token-scopes" + ) + + +class JupyterhubInputVars(schema.Base): + cdsdashboards: Dict[str, Any] + jupyterhub_theme: Dict[str, Any] = Field(alias="jupyterhub-theme") + jupyterlab_image: ImageNameTag = Field(alias="jupyterlab-image") + jupyterhub_overrides: List[str] = Field(alias="jupyterhub-overrides") + jupyterhub_stared_storage: str = Field(alias="jupyterhub-shared-storage") + jupyterhub_shared_endpoint: str = Field(None, alias="jupyterhub-shared-endpoint") + jupyterhub_profiles: List[JupyterLabProfile] = Field(alias="jupyterlab-profiles") + jupyterhub_image: ImageNameTag = Field(alias="jupyterhub-image") + jupyterhub_hub_extraEnv: str = Field(alias="jupyterhub-hub-extraEnv") + idle_culler_settings: Dict[str, Any] = Field(alias="idle-culler-settings") + + +class DaskGatewayInputVars(schema.Base): + dask_worker_image: ImageNameTag = Field(alias="dask-worker-image") + dask_gateway_profiles: Dict[str, Any] = Field(alias="dask-gateway-profiles") + + +class MonitoringInputVars(schema.Base): + monitoring_enabled: bool = Field(alias="monitoring-enabled") + + +class ArgoWorkflowsInputVars(schema.Base): + argo_workflows_enabled: bool = Field(alias="argo-workflows-enabled") + argo_workflows_overrides: List[str] = Field(alias="argo-workflows-overrides") + nebari_workflow_controller: bool = Field(alias="nebari-workflow-controller") + workflow_controller_image_tag: str = Field(alias="workflow-controller-image-tag") + keycloak_read_only_user_credentials: Dict[str, Any] = Field( + alias="keycloak-read-only-user-credentials" + ) + + +class KBatchInputVars(schema.Base): + kbatch_enabled: bool = Field(alias="kbatch-enabled") + + +class PrefectInputVars(schema.Base): + prefect_enabled: bool = Field(alias="prefect-enabled") + prefect_token: str = Field(None, alias="prefect-token") + prefect_image: str = Field(None, alias="prefect-image") + prefect_overrides: Dict = Field(alias="prefect-overrides") + + +class ClearMLInputVars(schema.Base): + clearml_enabled: bool = Field(alias="clearml-enabled") + clearml_enable_forwardauth: bool = Field(alias="clearml-enable-forwardauth") + clearml_overrides: List[str] = Field(alias="clearml-overrides") + + +class KubernetesServicesStage(NebariTerraformStage): + name = "07-kubernetes-services" + priority = 70 + + input_schema = InputSchema + output_schema = OutputSchema + + 
def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + NebariKubernetesProvider(self.config), + NebariHelmProvider(self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + domain = stage_outputs["stages/04-kubernetes-ingress"]["domain"] + final_logout_uri = f"https://{domain}/hub/login" + + realm_id = stage_outputs["stages/06-kubernetes-keycloak-configuration"][ + "realm_id" + ]["value"] + jupyterhub_shared_endpoint = ( + stage_outputs["stages/02-infrastructure"] + .get("nfs_endpoint", {}) + .get("value") + ) + keycloak_read_only_user_credentials = stage_outputs[ + "stages/06-kubernetes-keycloak-configuration" + ]["keycloak-read-only-user-credentials"]["value"] + + conda_store_token_scopes = { + "cdsdashboards": { + "primary_namespace": "cdsdashboards", + "role_bindings": { + "*/*": ["viewer"], + }, + }, + "dask-gateway": { + "primary_namespace": "", + "role_bindings": { + "*/*": ["viewer"], + }, + }, + "argo-workflows-jupyter-scheduler": { + "primary_namespace": "", + "role_bindings": { + "*/*": ["viewer"], + }, + }, + } + + # Compound any logout URLs from extensions so they are logged out in succession + # when Keycloak and JupyterHub are logged out + for ext in self.config.tf_extensions: + if ext.logout != "": + final_logout_uri = "{}?{}".format( + f"https://{domain}/{ext.urlslug}{ext.logout}", + urlencode({"redirect_uri": final_logout_uri}), + ) + + jupyterhub_theme = self.config.theme.jupyterhub + if self.config.theme.jupyterhub.display_version and ( + not self.config.theme.jupyterhub.version + ): + jupyterhub_theme.update({"version": f"v{self.config.nebari_version}"}) + + kubernetes_services_vars = KubernetesServicesInputVars( + name=self.config.project_name, + environment=self.config.namespace, + endpoint=domain, + realm_id=realm_id, + node_groups=stage_outputs["stages/02-infrastructure"]["node_selectors"], + jupyterhub_logout_redirect_url=final_logout_uri, + ) + + conda_store_vars = CondaStoreInputVars( + conda_store_environments={ + k: v.dict() for k, v in self.config.environments.items() + }, + conda_store_default_namespace=self.config.conda_store.default_namespace, + conda_store_filesystem_storage=self.config.storage.conda_store, + conda_store_object_storage=self.config.conda_store.object_storage, + conda_store_service_token_scopes=conda_store_token_scopes, + conda_store_extra_settings=self.config.conda_store.extra_settings, + conda_store_extra_config=self.config.conda_store.extra_config, + conda_store_image=self.config.conda_store.image, + conda_store_image_tag=self.config.conda_store.image_tag, + ) + + jupyterhub_vars = JupyterhubInputVars( + cdsdashboards=self.config.cdsdashboards.dict(), + jupyterhub_theme=jupyterhub_theme.dict(), + jupyterlab_image=_split_docker_image_name( + self.config.default_images.jupyterlab + ), + jupyterhub_stared_storage=self.config.storage.shared_filesystem, + jupyterhub_shared_endpoint=jupyterhub_shared_endpoint, + jupyterhub_profiles=self.config.profiles.dict()["jupyterlab"], + jupyterhub_image=_split_docker_image_name( + self.config.default_images.jupyterhub + ), + jupyterhub_overrides=[json.dumps(self.config.jupyterhub.overrides)], + jupyterhub_hub_extraEnv=json.dumps( + self.config.jupyterhub.overrides.get("hub", {}).get("extraEnv", []) + ), + idle_culler_settings=self.config.jupyterlab.idle_culler.dict(), + ) + + dask_gateway_vars = DaskGatewayInputVars( + dask_worker_image=_split_docker_image_name( + self.config.default_images.dask_worker + ), +
dask_gateway_profiles=self.config.profiles.dict()["dask_worker"], + ) + + monitoring_vars = MonitoringInputVars( + monitoring_enabled=self.config.monitoring.enabled, + ) + + argo_workflows_vars = ArgoWorkflowsInputVars( + argo_workflows_enabled=self.config.argo_workflows.enabled, + argo_workflows_overrides=[json.dumps(self.config.argo_workflows.overrides)], + nebari_workflow_controller=self.config.argo_workflows.nebari_workflow_controller.enabled, + workflow_controller_image_tag=self.config.argo_workflows.nebari_workflow_controller.image_tag, + keycloak_read_only_user_credentials=keycloak_read_only_user_credentials, + ) + + kbatch_vars = KBatchInputVars( + kbatch_enabled=self.config.kbatch.enabled, + ) + + prefect_vars = PrefectInputVars( + prefect_enabled=self.config.prefect.enabled, + prefect_token=self.config.prefect.token, + prefect_image=self.config.prefect.image, + prefect_overrides=self.config.prefect.overrides, + ) + + clearml_vars = ClearMLInputVars( + clearml_enabled=self.config.clearml.enabled, + clearml_enable_forwardauth=self.config.clearml.enable_forward_auth, + clearml_overrides=[json.dumps(self.config.clearml.overrides)], + ) + + return { + **kubernetes_services_vars.dict(by_alias=True), + **conda_store_vars.dict(by_alias=True), + **jupyterhub_vars.dict(by_alias=True), + **dask_gateway_vars.dict(by_alias=True), + **monitoring_vars.dict(by_alias=True), + **argo_workflows_vars.dict(by_alias=True), + **kbatch_vars.dict(by_alias=True), + **prefect_vars.dict(by_alias=True), + **clearml_vars.dict(by_alias=True), + } + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]): + directory = "stages/07-kubernetes-services" + import requests + + # suppress insecure warnings + import urllib3 + + urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + + def _attempt_connect_url( + url, verify=False, num_attempts=NUM_ATTEMPTS, timeout=TIMEOUT + ): + for i in range(num_attempts): + response = requests.get(url, verify=verify, timeout=timeout) + if response.status_code < 400: + print(f"Attempt {i+1} health check succeeded for url={url}") + return True + else: + print(f"Attempt {i+1} health check failed for url={url}") + time.sleep(timeout) + return False + + services = stage_outputs[directory]["service_urls"]["value"] + for service_name, service in services.items(): + service_url = service["health_url"] + if service_url and not _attempt_connect_url(service_url): + print( + f"ERROR: Service {service_name} DOWN when checking url={service_url}" + ) + sys.exit(1) + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [KubernetesServicesStage] diff --git a/src/_nebari/template/stages/07-kubernetes-services/argo-workflows.tf b/src/_nebari/stages/kubernetes_services/template/argo-workflows.tf similarity index 94% rename from src/_nebari/template/stages/07-kubernetes-services/argo-workflows.tf rename to src/_nebari/stages/kubernetes_services/template/argo-workflows.tf index 34927f2b5..ade8c2c3c 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/argo-workflows.tf +++ b/src/_nebari/stages/kubernetes_services/template/argo-workflows.tf @@ -2,26 +2,22 @@ variable "argo-workflows-enabled" { description = "Argo Workflows enabled" type = bool - default = true } variable "argo-workflows-overrides" { description = "Argo Workflows helm chart overrides" type = list(string) - default = [] } variable "nebari-workflow-controller" { description = "Nebari Workflow Controller enabled" type = bool - default = true } variable "keycloak-read-only-user-credentials" { 
description = "Keycloak password for nebari-bot" type = map(string) - default = {} } variable "workflow-controller-image-tag" { diff --git a/src/_nebari/template/stages/07-kubernetes-services/clearml.tf b/src/_nebari/stages/kubernetes_services/template/clearml.tf similarity index 92% rename from src/_nebari/template/stages/07-kubernetes-services/clearml.tf rename to src/_nebari/stages/kubernetes_services/template/clearml.tf index 59c5ceba4..6c619fc65 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/clearml.tf +++ b/src/_nebari/stages/kubernetes_services/template/clearml.tf @@ -2,20 +2,17 @@ variable "clearml-enabled" { description = "Clearml enabled or disabled" type = bool - default = false } variable "clearml-enable-forwardauth" { description = "Clearml enabled or disabled forward authentication" type = bool - default = false } variable "clearml-overrides" { description = "Clearml helm chart overrides" type = list(string) - default = [] } diff --git a/src/_nebari/template/stages/07-kubernetes-services/conda-store.tf b/src/_nebari/stages/kubernetes_services/template/conda-store.tf similarity index 93% rename from src/_nebari/template/stages/07-kubernetes-services/conda-store.tf rename to src/_nebari/stages/kubernetes_services/template/conda-store.tf index 330901ec1..904a17e8d 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/conda-store.tf +++ b/src/_nebari/stages/kubernetes_services/template/conda-store.tf @@ -1,7 +1,6 @@ # ======================= VARIABLES ====================== variable "conda-store-environments" { description = "Conda-Store managed environments" - default = {} } variable "conda-store-filesystem-storage" { @@ -12,31 +11,31 @@ variable "conda-store-filesystem-storage" { variable "conda-store-object-storage" { description = "Conda-Store storage in GB for object storage. Conda-Store uses minio for object storage to be cloud agnostic. 
If empty default is var.conda-store-filesystem-storage value" type = string - default = null } variable "conda-store-extra-settings" { description = "Conda-Store extra traitlet settings to apply in `c.Class.key = value` form" type = map(any) - default = {} } variable "conda-store-extra-config" { description = "Additional traitlets configuration code to be ran" type = string - default = "" } variable "conda-store-image" { description = "Conda-Store image" type = string - default = "quansight/conda-store-server" } variable "conda-store-image-tag" { description = "Version of conda-store to use" type = string - default = "v0.4.14" +} + +variable "conda-store-service-token-scopes" { + description = "Map of services tokens and scopes for conda-store" + type = map(any) } # ====================== RESOURCES ======================= diff --git a/src/_nebari/template/stages/07-kubernetes-services/dask_gateway.tf b/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf similarity index 98% rename from src/_nebari/template/stages/07-kubernetes-services/dask_gateway.tf rename to src/_nebari/stages/kubernetes_services/template/dask_gateway.tf index 20bbb340a..765be2753 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/dask_gateway.tf +++ b/src/_nebari/stages/kubernetes_services/template/dask_gateway.tf @@ -9,7 +9,6 @@ variable "dask-worker-image" { variable "dask-gateway-profiles" { description = "Dask Gateway profiles to expose to user" - default = [] } diff --git a/src/_nebari/template/stages/07-kubernetes-services/forward-auth.tf b/src/_nebari/stages/kubernetes_services/template/forward-auth.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/forward-auth.tf rename to src/_nebari/stages/kubernetes_services/template/forward-auth.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/jupyterhub.tf b/src/_nebari/stages/kubernetes_services/template/jupyterhub.tf similarity index 91% rename from src/_nebari/template/stages/07-kubernetes-services/jupyterhub.tf rename to src/_nebari/stages/kubernetes_services/template/jupyterhub.tf index ed3a478d7..abb76a3a7 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/jupyterhub.tf +++ b/src/_nebari/stages/kubernetes_services/template/jupyterhub.tf @@ -5,17 +5,11 @@ variable "cdsdashboards" { cds_hide_user_named_servers = bool cds_hide_user_dashboard_servers = bool }) - default = { - enabled = true - cds_hide_user_named_servers = true - cds_hide_user_dashboard_servers = false - } } variable "jupyterhub-theme" { description = "JupyterHub theme" type = map(any) - default = {} } variable "jupyterhub-image" { @@ -40,7 +34,6 @@ variable "jupyterhub-shared-storage" { variable "jupyterhub-shared-endpoint" { description = "JupyterHub shared storage nfs endpoint" type = string - default = null } variable "jupyterlab-image" { @@ -53,7 +46,17 @@ variable "jupyterlab-image" { variable "jupyterlab-profiles" { description = "JupyterHub profiles to expose to user" - default = [] +} + +variable "jupyterhub-hub-extraEnv" { + description = "Extracted overrides to merge with jupyterhub.hub.extraEnv" + type = string + default = "[]" +} + +variable "idle-culler-settings" { + description = "Idle culler timeout settings (in minutes)" + type = any } @@ -136,6 +139,6 @@ module "jupyterhub" { jupyterhub-logout-redirect-url = var.jupyterhub-logout-redirect-url jupyterhub-hub-extraEnv = var.jupyterhub-hub-extraEnv - idle-culler-settings = local.idle-culler-settings + idle-culler-settings = var.idle-culler-settings 
} diff --git a/src/_nebari/template/stages/07-kubernetes-services/jupyterhub_ssh.tf b/src/_nebari/stages/kubernetes_services/template/jupyterhub_ssh.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/jupyterhub_ssh.tf rename to src/_nebari/stages/kubernetes_services/template/jupyterhub_ssh.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/kbatch.tf b/src/_nebari/stages/kubernetes_services/template/kbatch.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/kbatch.tf rename to src/_nebari/stages/kubernetes_services/template/kbatch.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/locals.tf b/src/_nebari/stages/kubernetes_services/template/locals.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/locals.tf rename to src/_nebari/stages/kubernetes_services/template/locals.tf diff --git a/src/_nebari/cli/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/__init__.py similarity index 100% rename from src/_nebari/cli/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/__init__.py diff --git a/src/_nebari/template/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/__init__.py similarity index 100% rename from src/_nebari/template/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/forwardauth/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/forwardauth/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/forwardauth/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/forwardauth/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/forwardauth/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-mount/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-mount/variables.tf 
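The *InputVars models added above map snake_case Python fields onto the hyphenated Terraform variable names through pydantic aliases, and input_vars() merges each model's dict(by_alias=True) output into one dict of Terraform inputs. A minimal self-contained sketch of that pattern, using plain pydantic v1 and an assumed allow_population_by_field_name setting rather than Nebari's actual schema.Base:

```python
from typing import Any, Dict

import pydantic  # pydantic v1 API, matching the validators used in the stage code above


class Base(pydantic.BaseModel):
    class Config:
        # assumption: the project's base model permits population by field name
        allow_population_by_field_name = True


class MonitoringInputVars(Base):
    monitoring_enabled: bool = pydantic.Field(alias="monitoring-enabled")


class KBatchInputVars(Base):
    kbatch_enabled: bool = pydantic.Field(alias="kbatch-enabled")


def terraform_input_vars() -> Dict[str, Any]:
    # Each *InputVars model is serialized by alias and merged into a single dict,
    # mirroring the return statement of KubernetesServicesStage.input_vars().
    monitoring = MonitoringInputVars(monitoring_enabled=True)
    kbatch = KBatchInputVars(kbatch_enabled=True)
    return {
        **monitoring.dict(by_alias=True),
        **kbatch.dict(by_alias=True),
    }


if __name__ == "__main__":
    print(terraform_input_vars())
    # {'monitoring-enabled': True, 'kbatch-enabled': True}
```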
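The logout-URL compounding in input_vars() wraps each extension's logout endpoint around the previous redirect, so every extension's logout URL is visited in succession before landing back at /hub/login. A standalone sketch of that chaining, with an illustrative domain and a hypothetical extension list standing in for self.config.tf_extensions:

```python
from urllib.parse import urlencode

# illustrative values; in the stage these come from stage_outputs and self.config
domain = "nebari.example.com"
final_logout_uri = f"https://{domain}/hub/login"

# hypothetical extension entries, modeled here as plain dicts
tf_extensions = [{"urlslug": "myext", "logout": "/logout"}]

for ext in tf_extensions:
    if ext["logout"] != "":
        # the previous logout URI becomes this extension's redirect_uri
        final_logout_uri = "{}?{}".format(
            f"https://{domain}/{ext['urlslug']}{ext['logout']}",
            urlencode({"redirect_uri": final_logout_uri}),
        )

print(final_logout_uri)
# https://nebari.example.com/myext/logout?redirect_uri=https%3A%2F%2Fnebari.example.com%2Fhub%2Flogin
```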
diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/output.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/output.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/output.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/output.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/nfs-server/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/nfs-server/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/versions.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/versions.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/argo-workflows/versions.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/argo-workflows/versions.tf diff --git 
a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/Chart.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/Chart.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/Chart.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/Chart.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/LICENSE b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/LICENSE similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/LICENSE rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/LICENSE diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/README.md b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/README.md similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/README.md rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/README.md diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/charts/mongodb-10.3.7.tgz b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/charts/mongodb-10.3.7.tgz similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/charts/mongodb-10.3.7.tgz rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/charts/mongodb-10.3.7.tgz diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/charts/redis-10.9.0.tgz b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/charts/redis-10.9.0.tgz similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/charts/redis-10.9.0.tgz rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/charts/redis-10.9.0.tgz diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/NOTES.txt b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/NOTES.txt similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/NOTES.txt rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/NOTES.txt diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/_helpers.tpl b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/_helpers.tpl similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/_helpers.tpl rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/_helpers.tpl diff --git 
a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-agent.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-agent.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-agent.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-agent.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-agentservices.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-agentservices.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-agentservices.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-agentservices.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-apiserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-apiserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-apiserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-apiserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-elastic.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-elastic.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-elastic.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-elastic.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-fileserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-fileserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-fileserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-fileserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-webserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-webserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/deployment-webserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/deployment-webserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/ingress.yaml 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/ingress.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/ingress.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/ingress.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-agentservices.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-agentservices.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-agentservices.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-agentservices.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-apiserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-apiserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-apiserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-apiserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-fileserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-fileserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/pvc-fileserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/pvc-fileserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/secret-agent.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/secret-agent.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/secret-agent.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/secret-agent.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/secrets.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/secrets.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/secrets.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/secrets.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-apiserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-apiserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-apiserver.yaml rename to 
src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-apiserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-fileserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-fileserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-fileserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-fileserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-webserver.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-webserver.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/templates/service-webserver.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/templates/service-webserver.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/chart/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/chart/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/ingress.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/ingress.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/ingress.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/ingress.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/clearml/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/clearml/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/__init__.py 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/config/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/config/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/config/conda_store_config.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/conda_store_config.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/config/conda_store_config.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/output.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/output.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/output.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/output.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/server.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/server.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/server.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/server.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/storage.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/storage.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/storage.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/storage.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/worker.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/worker.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/worker.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/conda-store/worker.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/__init__.py rename to 
src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/controler.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/controler.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/controler.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/controler.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/crds.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/crds.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/crds.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/crds.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/controller_config.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/controller_config.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/controller_config.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/controller_config.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/gateway_config.py similarity index 98% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/gateway_config.py index ae2532d55..ccca6ba39 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/gateway_config.py +++ b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/files/gateway_config.py @@ -48,6 +48,9 @@ def dask_gateway_config(path="/var/lib/dask-gateway/config.json"): c.KubeClusterConfig.worker_cores_limit = config["cluster"]["worker_cores_limit"] c.KubeClusterConfig.worker_memory = config["cluster"]["worker_memory"] c.KubeClusterConfig.worker_memory_limit = config["cluster"]["worker_memory_limit"] +c.KubeClusterConfig.worker_threads = config["cluster"].get( + "worker_threads", config["cluster"]["worker_cores"] +) c.KubeClusterConfig.worker_extra_container_config = config["cluster"][ "worker_extra_container_config" ] diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/gateway.tf 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/gateway.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/gateway.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/gateway.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/middleware.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/middleware.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/middleware.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/dask-gateway/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/sftp.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/sftp.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/sftp.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/sftp.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/ssh.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/ssh.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/ssh.tf rename to 
src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/ssh.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub-ssh/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub-ssh/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/conda-store/config/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/configmaps.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/configmaps.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/configmaps.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/configmaps.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/ipython/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/dask-gateway/files/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/ipython/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/ipython/ipython_config.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/ipython/ipython_config.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/ipython/ipython_config.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/ipython/ipython_config.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyter/jupyter_server_config.py.tpl b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyter/jupyter_server_config.py.tpl similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyter/jupyter_server_config.py.tpl rename to 
src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyter/jupyter_server_config.py.tpl diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/01-theme.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/01-theme.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/01-theme.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/01-theme.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/02-spawner.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/03-profiles.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/__init__.py b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/__init__.py rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterhub/__init__.py diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/jupyterlab/overrides.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.bash_logout b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.bash_logout similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.bash_logout rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.bash_logout diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.bashrc b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.bashrc similarity index 100% rename from 
src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.bashrc rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.bashrc diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.profile b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.profile similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/skel/.profile rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/files/skel/.profile diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/jupyterhub/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/variables.tf 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/versions.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/versions.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/kbatch/versions.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/kbatch/versions.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/versions.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/versions.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/keycloak-client/versions.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/keycloak-client/versions.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/ingress.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/ingress.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/ingress.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/ingress.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/main.tf 
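All of the renames above follow one pattern: the shared src/_nebari/template/stages/07-kubernetes-services tree moves under the stage's own package as src/_nebari/stages/kubernetes_services/template, with the module-relative path unchanged. A minimal sketch of that mapping, using only the path prefixes visible in this diff (the helper name is illustrative):

OLD_PREFIX = "src/_nebari/template/stages/07-kubernetes-services/"
NEW_PREFIX = "src/_nebari/stages/kubernetes_services/template/"

def relocated(old_path: str) -> str:
    # Translate an old shared-template path to its new per-stage location.
    return old_path.replace(OLD_PREFIX, NEW_PREFIX, 1)

# e.g. relocated(OLD_PREFIX + "modules/kubernetes/services/minio/main.tf")
# -> "src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/main.tf"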
diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/minio/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/minio/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/cluster_information.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/cluster_information.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/cluster_information.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/cluster_information.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/conda_store.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/conda_store.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/conda_store.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/conda_store.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/jupyterhub_dashboard.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/jupyterhub_dashboard.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/jupyterhub_dashboard.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/jupyterhub_dashboard.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/keycloak.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/keycloak.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/keycloak.json rename to 
src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/keycloak.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/traefik.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/traefik.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/traefik.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/traefik.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/usage_report.json b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/usage_report.json similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/dashboards/Main/usage_report.json rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/dashboards/Main/usage_report.json diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/versions.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/versions.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/monitoring/versions.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/monitoring/versions.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/main.tf diff --git 
a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/postgresql/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/postgresql/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/.helmignore b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/.helmignore similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/.helmignore rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/.helmignore diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/Chart.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/Chart.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/Chart.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/Chart.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/_helpers.tpl b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/_helpers.tpl similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/_helpers.tpl rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/_helpers.tpl diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/prefect.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/prefect.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/prefect.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/prefect.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/secret.yaml 
b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/secret.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/templates/secret.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/templates/secret.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/chart/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/chart/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/values.yaml diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/prefect/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/prefect/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/main.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/main.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/main.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/outputs.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/values.yaml b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/values.yaml similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/values.yaml rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/values.yaml diff --git 
a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/variables.tf b/src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/variables.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/redis/variables.tf rename to src/_nebari/stages/kubernetes_services/template/modules/kubernetes/services/redis/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/monitoring.tf b/src/_nebari/stages/kubernetes_services/template/monitoring.tf similarity index 95% rename from src/_nebari/template/stages/07-kubernetes-services/monitoring.tf rename to src/_nebari/stages/kubernetes_services/template/monitoring.tf index 255b163d5..ec20a75ba 100644 --- a/src/_nebari/template/stages/07-kubernetes-services/monitoring.tf +++ b/src/_nebari/stages/kubernetes_services/template/monitoring.tf @@ -1,7 +1,6 @@ variable "monitoring-enabled" { description = "Prometheus and Grafana monitoring enabled" type = bool - default = true } module "monitoring" { diff --git a/src/_nebari/template/stages/07-kubernetes-services/outputs.tf b/src/_nebari/stages/kubernetes_services/template/outputs.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/outputs.tf rename to src/_nebari/stages/kubernetes_services/template/outputs.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/prefect.tf b/src/_nebari/stages/kubernetes_services/template/prefect.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/prefect.tf rename to src/_nebari/stages/kubernetes_services/template/prefect.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/providers.tf b/src/_nebari/stages/kubernetes_services/template/providers.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/providers.tf rename to src/_nebari/stages/kubernetes_services/template/providers.tf diff --git a/src/_nebari/stages/kubernetes_services/template/variables.tf b/src/_nebari/stages/kubernetes_services/template/variables.tf new file mode 100644 index 000000000..63c6c5b4f --- /dev/null +++ b/src/_nebari/stages/kubernetes_services/template/variables.tf @@ -0,0 +1,40 @@ +# Variables that are shared between multiple kubernetes services + +variable "name" { + description = "Prefix name to assign to kubernetes resources" + type = string +} + +variable "environment" { + description = "Kubernetes namespace to create resources within" + type = string +} + +variable "endpoint" { + description = "Endpoint for services" + type = string +} + +variable "realm_id" { + description = "Keycloak realm id for creating clients" + type = string +} + +variable "node_groups" { + description = "Node group selectors for kubernetes resources" + type = map(object({ + key = string + value = string + })) +} + +variable "jupyterhub-logout-redirect-url" { + description = "Next redirect destination following a Keycloak logout" + type = string + default = "" +} + +variable "conda-store-default-namespace" { + description = "Default conda-store namespace name" + type = string +} diff --git a/src/_nebari/template/stages/07-kubernetes-services/versions.tf b/src/_nebari/stages/kubernetes_services/template/versions.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/versions.tf rename to src/_nebari/stages/kubernetes_services/template/versions.tf diff --git a/src/_nebari/stages/nebari_tf_extensions/__init__.py 
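The new variables.tf above declares the inputs shared by every kubernetes-services module. For orientation only, a sketch of the kind of dictionary the stage's input_vars could return to populate those variables; every value here is a placeholder, not taken from the diff:

# Placeholder values for the shared Terraform variables declared above.
shared_input_vars = {
    "name": "nebari",                               # resource name prefix
    "environment": "dev",                           # kubernetes namespace
    "endpoint": "nebari.example.com",               # services endpoint
    "realm_id": "nebari",                           # Keycloak realm id
    "node_groups": {                                # map(object({key, value}))
        "general": {"key": "kubernetes.io/node-group", "value": "general"},
    },
    "jupyterhub-logout-redirect-url": "",           # optional, defaults to ""
    "conda-store-default-namespace": "nebari-git",  # placeholder namespace name
}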
b/src/_nebari/stages/nebari_tf_extensions/__init__.py new file mode 100644 index 000000000..cf2bf7e5a --- /dev/null +++ b/src/_nebari/stages/nebari_tf_extensions/__init__.py @@ -0,0 +1,81 @@ +import typing +from typing import Any, Dict, List + +from _nebari.stages.base import NebariTerraformStage +from _nebari.stages.tf_objects import ( + NebariHelmProvider, + NebariKubernetesProvider, + NebariTerraformState, +) +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + + +class NebariExtensionEnv(schema.Base): + name: str + value: str + + +class NebariExtension(schema.Base): + name: str + image: str + urlslug: str + private: bool = False + oauth2client: bool = False + keycloakadmin: bool = False + jwt: bool = False + nebariconfigyaml: bool = False + logout: typing.Optional[str] + envs: typing.Optional[typing.List[NebariExtensionEnv]] + + +class HelmExtension(schema.Base): + name: str + repository: str + chart: str + version: str + overrides: typing.Dict = {} + + +class InputSchema(schema.Base): + helm_extensions: typing.List[HelmExtension] = [] + tf_extensions: typing.List[NebariExtension] = [] + + +class OutputSchema(schema.Base): + pass + + +class NebariTFExtensionsStage(NebariTerraformStage): + name = "08-nebari-tf-extensions" + priority = 80 + + input_schema = InputSchema + output_schema = OutputSchema + + def tf_objects(self) -> List[Dict]: + return [ + NebariTerraformState(self.name, self.config), + NebariKubernetesProvider(self.config), + NebariHelmProvider(self.config), + ] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + return { + "environment": self.config.namespace, + "endpoint": self.config.domain, + "realm_id": stage_outputs["stages/06-kubernetes-keycloak-configuration"][ + "realm_id" + ]["value"], + "tf_extensions": [_.dict() for _ in self.config.tf_extensions], + "nebari_config_yaml": self.config.dict(), + "keycloak_nebari_bot_password": stage_outputs[ + "stages/05-kubernetes-keycloak" + ]["keycloak_nebari_bot_password"]["value"], + "helm_extensions": [_.dict() for _ in self.config.helm_extensions], + } + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [NebariTFExtensionsStage] diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/helm-extension.tf b/src/_nebari/stages/nebari_tf_extensions/template/helm-extension.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/helm-extension.tf rename to src/_nebari/stages/nebari_tf_extensions/template/helm-extension.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/helm-extensions/main.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/helm-extensions/main.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/helm-extensions/main.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/helm-extensions/main.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/helm-extensions/variables.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/helm-extensions/variables.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/helm-extensions/variables.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/helm-extensions/variables.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/ingress.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/ingress.tf similarity index 100% rename from 
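The nebari_tf_extensions stage above is a compact illustration of the plugin contract this refactor introduces: subclass NebariTerraformStage, declare name, priority, and the input/output schemas, and expose the class through the nebari_stage hook. A minimal sketch of an external stage written against the same interfaces (the class and stage name are hypothetical):

from typing import Any, Dict, List

from _nebari.stages.base import NebariTerraformStage
from nebari.hookspecs import NebariStage, hookimpl


class MyExtraStage(NebariTerraformStage):
    # Hypothetical stage that would run after the built-in 08 stage.
    name = "09-my-extra-stage"
    priority = 90

    def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]):
        # Values handed to this stage's Terraform templates.
        return {"environment": self.config.namespace}


@hookimpl
def nebari_stage() -> List[NebariStage]:
    return [MyExtraStage]

Because discovery happens through pluggy hooks, such a module only needs to be loaded by the plugin manager for its stage to be ordered alongside the built-ins by priority.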
src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/ingress.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/ingress.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/keycloak-config.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/keycloak-config.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/keycloak-config.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/keycloak-config.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/locals.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/locals.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/locals.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/main.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/main.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/main.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/main.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/variables.tf b/src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/modules/nebariextension/variables.tf rename to src/_nebari/stages/nebari_tf_extensions/template/modules/nebariextension/variables.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/nebari-config.tf b/src/_nebari/stages/nebari_tf_extensions/template/nebari-config.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/nebari-config.tf rename to src/_nebari/stages/nebari_tf_extensions/template/nebari-config.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/providers.tf b/src/_nebari/stages/nebari_tf_extensions/template/providers.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/providers.tf rename to src/_nebari/stages/nebari_tf_extensions/template/providers.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/tf-extensions.tf b/src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/tf-extensions.tf rename to src/_nebari/stages/nebari_tf_extensions/template/tf-extensions.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/variables.tf b/src/_nebari/stages/nebari_tf_extensions/template/variables.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/variables.tf rename to src/_nebari/stages/nebari_tf_extensions/template/variables.tf diff --git a/src/_nebari/template/stages/08-nebari-tf-extensions/versions.tf b/src/_nebari/stages/nebari_tf_extensions/template/versions.tf similarity index 100% rename from src/_nebari/template/stages/08-nebari-tf-extensions/versions.tf rename to src/_nebari/stages/nebari_tf_extensions/template/versions.tf diff --git a/src/_nebari/stages/state_imports.py b/src/_nebari/stages/state_imports.py deleted file mode 100644 index 77d3ce367..000000000 --- 
a/src/_nebari/stages/state_imports.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - - -def stage_01_terraform_state(stage_outputs, config): - if config["provider"] == "do": - return [ - ( - "module.terraform-state.module.spaces.digitalocean_spaces_bucket.main", - f"{config['digital_ocean']['region']},{config['project_name']}-{config['namespace']}-terraform-state", - ) - ] - elif config["provider"] == "gcp": - return [ - ( - "module.terraform-state.module.gcs.google_storage_bucket.static-site", - f"{config['project_name']}-{config['namespace']}-terraform-state", - ) - ] - elif config["provider"] == "azure": - subscription_id = os.environ["ARM_SUBSCRIPTION_ID"] - resource_name_prefix = f"{config['project_name']}-{config['namespace']}" - state_resource_group_name = f"{resource_name_prefix}-state" - state_resource_name_prefix_safe = resource_name_prefix.replace("-", "") - resource_group_url = f"/subscriptions/{subscription_id}/resourceGroups/{state_resource_group_name}" - - return [ - ( - "module.terraform-state.azurerm_resource_group.terraform-state-resource-group", - resource_group_url, - ), - ( - "module.terraform-state.azurerm_storage_account.terraform-state-storage-account", - f"{resource_group_url}/providers/Microsoft.Storage/storageAccounts/{state_resource_name_prefix_safe}{config['azure']['storage_account_postfix']}", - ), - ( - "module.terraform-state.azurerm_storage_container.storage_container", - f"https://{state_resource_name_prefix_safe}{config['azure']['storage_account_postfix']}.blob.core.windows.net/{resource_name_prefix}-state", - ), - ] - elif config["provider"] == "aws": - return [ - ( - "module.terraform-state.aws_s3_bucket.terraform-state", - f"{config['project_name']}-{config['namespace']}-terraform-state", - ), - ( - "module.terraform-state.aws_dynamodb_table.terraform-state-lock", - f"{config['project_name']}-{config['namespace']}-terraform-state-lock", - ), - ] diff --git a/src/_nebari/stages/terraform_state/__init__.py b/src/_nebari/stages/terraform_state/__init__.py new file mode 100644 index 000000000..ed01f6eb5 --- /dev/null +++ b/src/_nebari/stages/terraform_state/__init__.py @@ -0,0 +1,210 @@ +import contextlib +import enum +import inspect +import os +import pathlib +import typing +from typing import Any, Dict, List, Tuple + +from _nebari.stages.base import NebariTerraformStage +from _nebari.utils import modified_environ +from nebari import schema +from nebari.hookspecs import NebariStage, hookimpl + + +class DigitalOceanInputVars(schema.Base): + name: str + namespace: str + region: str + + +class GCPInputVars(schema.Base): + name: str + namespace: str + region: str + + +class AzureInputVars(schema.Base): + name: str + namespace: str + region: str + storage_account_postfix: str + state_resource_group_name: str + + +class AWSInputVars(schema.Base): + name: str + namespace: str + + +@schema.yaml_object(schema.yaml) +class TerraformStateEnum(str, enum.Enum): + remote = "remote" + local = "local" + existing = "existing" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class TerraformState(schema.Base): + type: TerraformStateEnum = TerraformStateEnum.remote + backend: typing.Optional[str] + config: typing.Dict[str, str] = {} + + +class InputSchema(schema.Base): + terraform_state: TerraformState = TerraformState() + + +class OutputSchema(schema.Base): + pass + + +class TerraformStateStage(NebariTerraformStage): + name = "01-terraform-state" + priority = 10 + + input_schema = InputSchema + output_schema = 
OutputSchema + + @property + def template_directory(self): + return ( + pathlib.Path(inspect.getfile(self.__class__)).parent + / "template" + / self.config.provider.value + ) + + @property + def stage_prefix(self): + return pathlib.Path("stages") / self.name / self.config.provider.value + + def state_imports(self) -> List[Tuple[str, str]]: + if self.config.provider == schema.ProviderEnum.do: + return [ + ( + "module.terraform-state.module.spaces.digitalocean_spaces_bucket.main", + f"{self.config.digital_ocean.region},{self.config.project_name}-{self.config.namespace}-terraform-state", + ) + ] + elif self.config.provider == schema.ProviderEnum.gcp: + return [ + ( + "module.terraform-state.module.gcs.google_storage_bucket.static-site", + f"{self.config.project_name}-{self.config.namespace}-terraform-state", + ) + ] + elif self.config.provider == schema.ProviderEnum.azure: + subscription_id = os.environ["ARM_SUBSCRIPTION_ID"] + resource_name_prefix = f"{self.config.project_name}-{self.config.namespace}" + state_resource_group_name = f"{resource_name_prefix}-state" + state_resource_name_prefix_safe = resource_name_prefix.replace("-", "") + resource_group_url = f"/subscriptions/{subscription_id}/resourceGroups/{state_resource_group_name}" + + return [ + ( + "module.terraform-state.azurerm_resource_group.terraform-state-resource-group", + resource_group_url, + ), + ( + "module.terraform-state.azurerm_storage_account.terraform-state-storage-account", + f"{resource_group_url}/providers/Microsoft.Storage/storageAccounts/{state_resource_name_prefix_safe}{self.config.azure.storage_account_postfix}", + ), + ( + "module.terraform-state.azurerm_storage_container.storage_container", + f"https://{state_resource_name_prefix_safe}{self.config.azure.storage_account_postfix}.blob.core.windows.net/{resource_name_prefix}-state", + ), + ] + elif self.config.provider == schema.ProviderEnum.aws: + return [ + ( + "module.terraform-state.aws_s3_bucket.terraform-state", + f"{self.config.project_name}-{self.config.namespace}-terraform-state", + ), + ( + "module.terraform-state.aws_dynamodb_table.terraform-state-lock", + f"{self.config.project_name}-{self.config.namespace}-terraform-state-lock", + ), + ] + else: + return [] + + def tf_objects(self) -> List[Dict]: + return [] + + def input_vars(self, stage_outputs: Dict[str, Dict[str, Any]]): + if self.config.provider == schema.ProviderEnum.do: + return DigitalOceanInputVars( + name=self.config.project_name, + namespace=self.config.namespace, + region=self.config.digital_ocean.region, + ).dict() + elif self.config.provider == schema.ProviderEnum.gcp: + return GCPInputVars( + name=self.config.project_name, + namespace=self.config.namespace, + region=self.config.google_cloud_platform.region, + ).dict() + elif self.config.provider == schema.ProviderEnum.aws: + return AWSInputVars( + name=self.config.project_name, + namespace=self.config.namespace, + ).dict() + elif self.config.provider == schema.ProviderEnum.azure: + return AzureInputVars( + name=self.config.project_name, + namespace=self.config.namespace, + region=self.config.azure.region, + storage_account_postfix=self.config.azure.storage_account_postfix, + state_resource_group_name=f"{self.config.project_name}-{self.config.namespace}-state", + ).dict() + elif ( + self.config.provider == schema.ProviderEnum.local + or self.config.provider == schema.ProviderEnum.existing + ): + return {} + else: + ValueError(f"Unknown provider: {self.config.provider}") + + @contextlib.contextmanager + def deploy(self, stage_outputs: 
Dict[str, Dict[str, Any]]): + with super().deploy(stage_outputs): + env_mapping = {} + # DigitalOcean terraform remote state using Spaces Bucket + # assumes aws credentials thus we set them to match spaces credentials + if self.config.provider == schema.ProviderEnum.do: + env_mapping.update( + { + "AWS_ACCESS_KEY_ID": os.environ["SPACES_ACCESS_KEY_ID"], + "AWS_SECRET_ACCESS_KEY": os.environ["SPACES_SECRET_ACCESS_KEY"], + } + ) + + with modified_environ(**env_mapping): + yield + + @contextlib.contextmanager + def destroy( + self, stage_outputs: Dict[str, Dict[str, Any]], status: Dict[str, bool] + ): + with super().destroy(stage_outputs, status): + env_mapping = {} + # DigitalOcean terraform remote state using Spaces Bucket + # assumes aws credentials thus we set them to match spaces credentials + if self.config.provider == schema.ProviderEnum.do: + env_mapping.update( + { + "AWS_ACCESS_KEY_ID": os.environ["SPACES_ACCESS_KEY_ID"], + "AWS_SECRET_ACCESS_KEY": os.environ["SPACES_SECRET_ACCESS_KEY"], + } + ) + + with modified_environ(**env_mapping): + yield + + +@hookimpl +def nebari_stage() -> List[NebariStage]: + return [TerraformStateStage] diff --git a/src/_nebari/template/stages/01-terraform-state/aws/main.tf b/src/_nebari/stages/terraform_state/template/aws/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/aws/main.tf rename to src/_nebari/stages/terraform_state/template/aws/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/main.tf b/src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/main.tf rename to src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/output.tf b/src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/output.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/output.tf rename to src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/output.tf diff --git a/src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/variables.tf b/src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/variables.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/aws/modules/terraform-state/variables.tf rename to src/_nebari/stages/terraform_state/template/aws/modules/terraform-state/variables.tf diff --git a/src/_nebari/template/stages/01-terraform-state/azure/main.tf b/src/_nebari/stages/terraform_state/template/azure/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/azure/main.tf rename to src/_nebari/stages/terraform_state/template/azure/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/azure/modules/terraform-state/main.tf b/src/_nebari/stages/terraform_state/template/azure/modules/terraform-state/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/azure/modules/terraform-state/main.tf rename to src/_nebari/stages/terraform_state/template/azure/modules/terraform-state/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/azure/modules/terraform-state/variables.tf b/src/_nebari/stages/terraform_state/template/azure/modules/terraform-state/variables.tf similarity index 100% rename from 
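In the deploy and destroy hooks above, modified_environ is what lets the DigitalOcean Spaces credentials stand in for the AWS variable names Terraform's S3 backend reads, and only for the duration of the stage. A sketch of that environment-override pattern in isolation; this illustrates the idea and is not the _nebari.utils implementation:

import contextlib
import os


@contextlib.contextmanager
def temporary_environ(**overrides):
    # Apply the overrides, then restore the previous environment on exit.
    previous = {key: os.environ.get(key) for key in overrides}
    os.environ.update(overrides)
    try:
        yield
    finally:
        for key, old_value in previous.items():
            if old_value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = old_value


# Usage sketch (mirrors the mapping done above for the "do" provider):
# with temporary_environ(
#     AWS_ACCESS_KEY_ID=os.environ["SPACES_ACCESS_KEY_ID"],
#     AWS_SECRET_ACCESS_KEY=os.environ["SPACES_SECRET_ACCESS_KEY"],
# ):
#     ...  # run the terraform steps for the Spaces-backed remote state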
src/_nebari/template/stages/01-terraform-state/azure/modules/terraform-state/variables.tf rename to src/_nebari/stages/terraform_state/template/azure/modules/terraform-state/variables.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/main.tf b/src/_nebari/stages/terraform_state/template/do/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/main.tf rename to src/_nebari/stages/terraform_state/template/do/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/spaces/main.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf similarity index 81% rename from src/_nebari/template/stages/01-terraform-state/do/modules/spaces/main.tf rename to src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf index e0e809c03..fc2d34c60 100644 --- a/src/_nebari/template/stages/01-terraform-state/do/modules/spaces/main.tf +++ b/src/_nebari/stages/terraform_state/template/do/modules/spaces/main.tf @@ -5,4 +5,8 @@ resource "digitalocean_spaces_bucket" "main" { force_destroy = var.force_destroy acl = (var.public ? "public-read" : "private") + + versioning { + enabled = false + } } diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/spaces/variables.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/variables.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/modules/spaces/variables.tf rename to src/_nebari/stages/terraform_state/template/do/modules/spaces/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/modules/registry/versions.tf b/src/_nebari/stages/terraform_state/template/do/modules/spaces/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/modules/registry/versions.tf rename to src/_nebari/stages/terraform_state/template/do/modules/spaces/versions.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/main.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/main.tf rename to src/_nebari/stages/terraform_state/template/do/modules/terraform-state/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/variables.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/variables.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/do/modules/terraform-state/variables.tf rename to src/_nebari/stages/terraform_state/template/do/modules/terraform-state/variables.tf diff --git a/src/_nebari/template/stages/02-infrastructure/do/versions.tf b/src/_nebari/stages/terraform_state/template/do/modules/terraform-state/versions.tf similarity index 100% rename from src/_nebari/template/stages/02-infrastructure/do/versions.tf rename to src/_nebari/stages/terraform_state/template/do/modules/terraform-state/versions.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/__init__.py b/src/_nebari/stages/terraform_state/template/existing/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/__init__.py rename to src/_nebari/stages/terraform_state/template/existing/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/gcp/main.tf 
b/src/_nebari/stages/terraform_state/template/gcp/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/gcp/main.tf rename to src/_nebari/stages/terraform_state/template/gcp/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/gcp/modules/gcs/main.tf b/src/_nebari/stages/terraform_state/template/gcp/modules/gcs/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/gcp/modules/gcs/main.tf rename to src/_nebari/stages/terraform_state/template/gcp/modules/gcs/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/gcp/modules/gcs/variables.tf b/src/_nebari/stages/terraform_state/template/gcp/modules/gcs/variables.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/gcp/modules/gcs/variables.tf rename to src/_nebari/stages/terraform_state/template/gcp/modules/gcs/variables.tf diff --git a/src/_nebari/template/stages/01-terraform-state/gcp/modules/terraform-state/main.tf b/src/_nebari/stages/terraform_state/template/gcp/modules/terraform-state/main.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/gcp/modules/terraform-state/main.tf rename to src/_nebari/stages/terraform_state/template/gcp/modules/terraform-state/main.tf diff --git a/src/_nebari/template/stages/01-terraform-state/gcp/modules/terraform-state/variables.tf b/src/_nebari/stages/terraform_state/template/gcp/modules/terraform-state/variables.tf similarity index 100% rename from src/_nebari/template/stages/01-terraform-state/gcp/modules/terraform-state/variables.tf rename to src/_nebari/stages/terraform_state/template/gcp/modules/terraform-state/variables.tf diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/ipython/__init__.py b/src/_nebari/stages/terraform_state/template/local/main.tf similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/ipython/__init__.py rename to src/_nebari/stages/terraform_state/template/local/main.tf diff --git a/src/_nebari/stages/tf_objects.py b/src/_nebari/stages/tf_objects.py index 3a961f678..f35b6aed1 100644 --- a/src/_nebari/stages/tf_objects.py +++ b/src/_nebari/stages/tf_objects.py @@ -1,43 +1,16 @@ -from pathlib import Path -from typing import Dict - -from _nebari.provider.terraform import ( - Data, - Provider, - TerraformBackend, - tf_render_objects, -) +from _nebari.provider.terraform import Data, Provider, TerraformBackend from _nebari.utils import deep_merge +from nebari import schema -def NebariAWSProvider(nebari_config: Dict): - return Provider("aws", region=nebari_config["amazon_web_services"]["region"]) - - -def NebariGCPProvider(nebari_config: Dict): - return Provider( - "google", - project=nebari_config["google_cloud_platform"]["project"], - region=nebari_config["google_cloud_platform"]["region"], - ) - - -def NebariAzureProvider(nebari_config: Dict): - return Provider("azurerm", features={}) - - -def NebariDigitalOceanProvider(nebari_config: Dict): - return Provider("digitalocean") - - -def NebariKubernetesProvider(nebari_config: Dict): - if nebari_config["provider"] == "aws": - cluster_name = f"{nebari_config['project_name']}-{nebari_config['namespace']}" +def NebariKubernetesProvider(nebari_config: schema.Main): + if nebari_config.provider == "aws": + cluster_name = f"{nebari_config.escaped_project_name}-{nebari_config.namespace}" # The AWS provider needs to be added, as we are using aws related 
resources #1254 return deep_merge( Data("aws_eks_cluster", "default", name=cluster_name), Data("aws_eks_cluster_auth", "default", name=cluster_name), - Provider("aws", region=nebari_config["amazon_web_services"]["region"]), + Provider("aws", region=nebari_config.amazon_web_services.region), Provider( "kubernetes", experiments={"manifest_resource": True}, @@ -52,9 +25,9 @@ def NebariKubernetesProvider(nebari_config: Dict): ) -def NebariHelmProvider(nebari_config: Dict): - if nebari_config["provider"] == "aws": - cluster_name = f"{nebari_config['project_name']}-{nebari_config['namespace']}" +def NebariHelmProvider(nebari_config: schema.Main): + if nebari_config.provider == "aws": + cluster_name = f"{nebari_config.escaped_project_name}-{nebari_config.namespace}" return deep_merge( Data("aws_eks_cluster", "default", name=cluster_name), @@ -71,229 +44,67 @@ def NebariHelmProvider(nebari_config: Dict): return Provider("helm") -def NebariTerraformState(directory: str, nebari_config: Dict): - if nebari_config["terraform_state"]["type"] == "local": +def NebariTerraformState(directory: str, nebari_config: schema.Main): + if nebari_config.terraform_state.type == "local": return {} - elif nebari_config["terraform_state"]["type"] == "existing": + elif nebari_config.terraform_state.type == "existing": return TerraformBackend( nebari_config["terraform_state"]["backend"], **nebari_config["terraform_state"]["config"], ) - elif nebari_config["provider"] == "aws": + elif nebari_config.provider == "aws": return TerraformBackend( "s3", - bucket=f"{nebari_config['project_name']}-{nebari_config['namespace']}-terraform-state", - key=f"terraform/{nebari_config['project_name']}-{nebari_config['namespace']}/{directory}.tfstate", - region=nebari_config["amazon_web_services"]["region"], + bucket=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state", + key=f"terraform/{nebari_config.escaped_project_name}-{nebari_config.namespace}/{directory}.tfstate", + region=nebari_config.amazon_web_services.region, encrypt=True, - dynamodb_table=f"{nebari_config['project_name']}-{nebari_config['namespace']}-terraform-state-lock", + dynamodb_table=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state-lock", ) - elif nebari_config["provider"] == "gcp": + elif nebari_config.provider == "gcp": return TerraformBackend( "gcs", - bucket=f"{nebari_config['project_name']}-{nebari_config['namespace']}-terraform-state", - prefix=f"terraform/{nebari_config['project_name']}/{directory}", + bucket=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state", + prefix=f"terraform/{nebari_config.escaped_project_name}/{directory}", ) - elif nebari_config["provider"] == "do": + elif nebari_config.provider == "do": return TerraformBackend( "s3", - endpoint=f"{nebari_config['digital_ocean']['region']}.digitaloceanspaces.com", + endpoint=f"{nebari_config.digital_ocean.region}.digitaloceanspaces.com", region="us-west-1", # fake aws region required by terraform - bucket=f"{nebari_config['project_name']}-{nebari_config['namespace']}-terraform-state", - key=f"terraform/{nebari_config['project_name']}-{nebari_config['namespace']}/{directory}.tfstate", + bucket=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-terraform-state", + key=f"terraform/{nebari_config.escaped_project_name}-{nebari_config.namespace}/{directory}.tfstate", skip_credentials_validation=True, skip_metadata_api_check=True, ) - elif nebari_config["provider"] == "azure": + elif nebari_config.provider == 
"azure": return TerraformBackend( "azurerm", - resource_group_name=f"{nebari_config['project_name']}-{nebari_config['namespace']}-state", + resource_group_name=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-state", # storage account must be globally unique - storage_account_name=f"{nebari_config['project_name']}{nebari_config['namespace']}{nebari_config['azure']['storage_account_postfix']}", - container_name=f"{nebari_config['project_name']}-{nebari_config['namespace']}-state", - key=f"terraform/{nebari_config['project_name']}-{nebari_config['namespace']}/{directory}", + storage_account_name=f"{nebari_config.escaped_project_name}{nebari_config.namespace}{nebari_config.azure.storage_account_postfix}", + container_name=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-state", + key=f"terraform/{nebari_config.escaped_project_name}-{nebari_config.namespace}/{directory}", ) - elif nebari_config["provider"] == "existing": + elif nebari_config.provider == "existing": optional_kwargs = {} - if "kube_context" in nebari_config["existing"]: - optional_kwargs["confix_context"] = nebari_config["existing"][ - "kube_context" - ] + if "kube_context" in nebari_config.existing: + optional_kwargs["config_context"] = nebari_config.existing.kube_context return TerraformBackend( "kubernetes", - secret_suffix=f"{nebari_config['project_name']}-{nebari_config['namespace']}-{directory}", + secret_suffix=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-{directory}", load_config_file=True, **optional_kwargs, ) - elif nebari_config["provider"] == "local": + elif nebari_config.provider == "local": optional_kwargs = {} - if "kube_context" in nebari_config["local"]: - optional_kwargs["confix_context"] = nebari_config["local"]["kube_context"] + if "kube_context" in nebari_config.local: + optional_kwargs["config_context"] = nebari_config.local.kube_context return TerraformBackend( "kubernetes", - secret_suffix=f"{nebari_config['project_name']}-{nebari_config['namespace']}-{directory}", + secret_suffix=f"{nebari_config.escaped_project_name}-{nebari_config.namespace}-{directory}", load_config_file=True, **optional_kwargs, ) else: raise NotImplementedError("state not implemented") - - -def stage_01_terraform_state(config): - if config["provider"] == "gcp": - return { - Path("stages") - / "01-terraform-state" - / "gcp" - / "_nebari.tf.json": tf_render_objects( - [ - NebariGCPProvider(config), - ] - ) - } - elif config["provider"] == "aws": - return { - Path("stages") - / "01-terraform-state" - / "aws" - / "_nebari.tf.json": tf_render_objects( - [ - NebariAWSProvider(config), - ] - ) - } - else: - return {} - - -def stage_02_infrastructure(config): - if config["provider"] == "gcp": - return { - Path("stages") - / "02-infrastructure" - / "gcp" - / "_nebari.tf.json": tf_render_objects( - [ - NebariGCPProvider(config), - NebariTerraformState("02-infrastructure", config), - ] - ) - } - elif config["provider"] == "do": - return { - Path("stages") - / "02-infrastructure" - / "do" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("02-infrastructure", config), - ] - ) - } - elif config["provider"] == "azure": - return { - Path("stages") - / "02-infrastructure" - / "azure" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("02-infrastructure", config), - ] - ), - } - elif config["provider"] == "aws": - return { - Path("stages") - / "02-infrastructure" - / "aws" - / "_nebari.tf.json": tf_render_objects( - [ - NebariAWSProvider(config), - 
NebariTerraformState("02-infrastructure", config), - ] - ) - } - else: - return {} - - -def stage_03_kubernetes_initialize(config): - return { - Path("stages") - / "03-kubernetes-initialize" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("03-kubernetes-initialize", config), - NebariKubernetesProvider(config), - NebariHelmProvider(config), - ] - ), - } - - -def stage_04_kubernetes_ingress(config): - return { - Path("stages") - / "04-kubernetes-ingress" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("04-kubernetes-ingress", config), - NebariKubernetesProvider(config), - NebariHelmProvider(config), - ] - ), - } - - -def stage_05_kubernetes_keycloak(config): - return { - Path("stages") - / "05-kubernetes-keycloak" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("05-kubernetes-keycloak", config), - NebariKubernetesProvider(config), - NebariHelmProvider(config), - ] - ), - } - - -def stage_06_kubernetes_keycloak_configuration(config): - return { - Path("stages") - / "06-kubernetes-keycloak-configuration" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("06-kubernetes-keycloak-configuration", config), - ] - ), - } - - -def stage_07_kubernetes_services(config): - return { - Path("stages") - / "07-kubernetes-services" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("07-kubernetes-services", config), - NebariKubernetesProvider(config), - NebariHelmProvider(config), - ] - ), - } - - -def stage_08_nebari_tf_extensions(config): - return { - Path("stages") - / "08-nebari-tf-extensions" - / "_nebari.tf.json": tf_render_objects( - [ - NebariTerraformState("08-nebari-tf-extensions", config), - NebariKubernetesProvider(config), - NebariHelmProvider(config), - ] - ), - } diff --git a/src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/__init__.py b/src/_nebari/subcommands/__init__.py similarity index 100% rename from src/_nebari/template/stages/07-kubernetes-services/modules/kubernetes/services/jupyterhub/files/jupyterhub/__init__.py rename to src/_nebari/subcommands/__init__.py diff --git a/src/_nebari/subcommands/deploy.py b/src/_nebari/subcommands/deploy.py new file mode 100644 index 000000000..6c0564a50 --- /dev/null +++ b/src/_nebari/subcommands/deploy.py @@ -0,0 +1,80 @@ +import pathlib + +import typer + +from _nebari.config import read_configuration +from _nebari.deploy import deploy_configuration +from _nebari.render import render_template +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command() + def deploy( + ctx: typer.Context, + config_filename: pathlib.Path = typer.Option( + ..., + "--config", + "-c", + help="nebari configuration yaml file path", + ), + output_directory: pathlib.Path = typer.Option( + "./", + "-o", + "--output", + help="output directory", + ), + dns_provider: str = typer.Option( + False, + "--dns-provider", + help="dns provider to use for registering domain name mapping", + ), + dns_auto_provision: bool = typer.Option( + False, + "--dns-auto-provision", + help="Attempt to automatically provision DNS, currently only available for `cloudflare`", + ), + disable_prompt: bool = typer.Option( + False, + "--disable-prompt", + help="Disable human intervention", + ), + disable_render: bool = typer.Option( + False, + "--disable-render", + help="Disable auto-rendering in deploy stage", + ), + disable_checks: bool = typer.Option( + False, + "--disable-checks", + help="Disable 
the checks performed after each stage", + ), + skip_remote_state_provision: bool = typer.Option( + False, + "--skip-remote-state-provision", + help="Skip terraform state deployment which is often required in CI once the terraform remote state bootstrapping phase is complete", + ), + ): + """ + Deploy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file. + """ + from nebari.plugins import nebari_plugin_manager + + stages = nebari_plugin_manager.ordered_stages + config_schema = nebari_plugin_manager.config_schema + + config = read_configuration(config_filename, config_schema=config_schema) + + if not disable_render: + render_template(output_directory, config, stages) + + deploy_configuration( + config, + stages, + dns_provider=dns_provider, + dns_auto_provision=dns_auto_provision, + disable_prompt=disable_prompt, + disable_checks=disable_checks, + skip_remote_state_provision=skip_remote_state_provision, + ) diff --git a/src/_nebari/subcommands/destroy.py b/src/_nebari/subcommands/destroy.py new file mode 100644 index 000000000..d94f8cd05 --- /dev/null +++ b/src/_nebari/subcommands/destroy.py @@ -0,0 +1,59 @@ +import pathlib + +import typer + +from _nebari.config import read_configuration +from _nebari.destroy import destroy_configuration +from _nebari.render import render_template +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command() + def destroy( + ctx: typer.Context, + config_filename: pathlib.Path = typer.Option( + ..., "-c", "--config", help="nebari configuration file path" + ), + output_directory: pathlib.Path = typer.Option( + "./", + "-o", + "--output", + help="output directory", + ), + disable_render: bool = typer.Option( + False, + "--disable-render", + help="Disable auto-rendering before destroy", + ), + disable_prompt: bool = typer.Option( + False, + "--disable-prompt", + help="Destroy entire Nebari cluster without confirmation request. Suggested for CI use.", + ), + ): + """ + Destroy the Nebari cluster from your [purple]nebari-config.yaml[/purple] file. 
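The deploy command above, like the destroy, dev, and info commands that follow, is attached to the CLI through the nebari_subcommand hook rather than being hard-coded into one module. A minimal sketch of an external plugin adding its own command the same way (the hello command and its option are hypothetical):

import typer

from nebari.hookspecs import hookimpl


@hookimpl
def nebari_subcommand(cli: typer.Typer):
    @cli.command()
    def hello(name: str = typer.Option("world", "--name", help="name to greet")):
        """Print a greeting (hypothetical example command)."""
        typer.echo(f"Hello, {name}!")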
+ """ + from nebari.plugins import nebari_plugin_manager + + stages = nebari_plugin_manager.ordered_stages + config_schema = nebari_plugin_manager.config_schema + + def _run_destroy( + config_filename=config_filename, disable_render=disable_render + ): + config = read_configuration(config_filename, config_schema=config_schema) + + if not disable_render: + render_template(output_directory, config, stages) + + destroy_configuration(config, stages) + + if disable_prompt: + _run_destroy() + elif typer.confirm("Are you sure you want to destroy your Nebari cluster?"): + _run_destroy() + else: + raise typer.Abort() diff --git a/src/_nebari/subcommands/dev.py b/src/_nebari/subcommands/dev.py new file mode 100644 index 000000000..59bfc77d5 --- /dev/null +++ b/src/_nebari/subcommands/dev.py @@ -0,0 +1,55 @@ +import json +import pathlib + +import typer + +from _nebari.config import read_configuration +from _nebari.keycloak import keycloak_rest_api_call +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + app_dev = typer.Typer( + add_completion=False, + no_args_is_help=True, + rich_markup_mode="rich", + context_settings={"help_option_names": ["-h", "--help"]}, + ) + + cli.add_typer( + app_dev, + name="dev", + help="Development tools and advanced features.", + rich_help_panel="Additional Commands", + ) + + @app_dev.command(name="keycloak-api") + def keycloak_api( + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ), + request: str = typer.Option( + ..., + "-r", + "--request", + help="Send a REST API request, valid requests follow patterns found here: [green]keycloak.org/docs-api/15.0/rest-api[/green]", + ), + ): + """ + Interact with the Keycloak REST API directly. + + This is an advanced tool which can have potentially destructive consequences. + Please use this at your own risk. 
+ + """ + from nebari.plugins import nebari_plugin_manager + + config_schema = nebari_plugin_manager.config_schema + + read_configuration(config_filename, config_schema=config_schema) + r = keycloak_rest_api_call(config_filename, request=request) + print(json.dumps(r, indent=4)) diff --git a/src/_nebari/subcommands/info.py b/src/_nebari/subcommands/info.py new file mode 100644 index 000000000..1a36afceb --- /dev/null +++ b/src/_nebari/subcommands/info.py @@ -0,0 +1,43 @@ +import collections + +import rich +import typer +from rich.table import Table + +from _nebari.version import __version__ +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command() + def info(ctx: typer.Context): + from nebari.plugins import nebari_plugin_manager + + rich.print(f"Nebari version: {__version__}") + + hooks = collections.defaultdict(list) + for plugin in nebari_plugin_manager.plugin_manager.get_plugins(): + for hook in nebari_plugin_manager.plugin_manager.get_hookcallers(plugin): + hooks[hook.name].append(plugin.__name__) + + table = Table(title="Hooks") + table.add_column("hook", justify="left", no_wrap=True) + table.add_column("module", justify="left", no_wrap=True) + + for hook_name, modules in hooks.items(): + for module in modules: + table.add_row(hook_name, module) + + rich.print(table) + + table = Table(title="Runtime Stage Ordering") + table.add_column("name") + table.add_column("priority") + table.add_column("module") + for stage in nebari_plugin_manager.ordered_stages: + table.add_row( + stage.name, str(stage.priority), f"{stage.__module__}.{stage.__name__}" + ) + + rich.print(table) diff --git a/src/_nebari/cli/init.py b/src/_nebari/subcommands/init.py similarity index 69% rename from src/_nebari/cli/init.py rename to src/_nebari/subcommands/init.py index 0e86c26b8..4ec23adec 100644 --- a/src/_nebari/cli/init.py +++ b/src/_nebari/subcommands/init.py @@ -1,22 +1,22 @@ +import enum import os +import pathlib import re -from pathlib import Path +import typing import questionary import rich import typer +from pydantic import BaseModel +from _nebari.config import write_configuration from _nebari.initialize import render_config -from _nebari.schema import ( - AuthenticationEnum, - CiEnum, - GitRepoEnum, - InitInputs, - ProviderEnum, - TerraformStateEnum, - project_name_convention, -) -from _nebari.utils import NEBARI_DASK_VERSION, NEBARI_IMAGE_TAG, yaml +from _nebari.stages.bootstrap import CiEnum +from _nebari.stages.kubernetes_keycloak import AuthenticationEnum +from _nebari.stages.terraform_state import TerraformStateEnum +from nebari import schema +from nebari.hookspecs import hookimpl +from nebari.schema import ProviderEnum MISSING_CREDS_TEMPLATE = "Unable to locate your {provider} credentials, refer to this guide on how to generate them:\n\n[green]\t{link_to_docs}[/green]\n\n" LINKS_TO_DOCS_TEMPLATE = ( @@ -41,24 +41,43 @@ DOCS_HOME = "https://nebari.dev/docs/" CHOOSE_CLOUD_PROVIDER = "https://nebari.dev/docs/get-started/deploy" +GUIDED_INIT_MSG = ( + "[bold green]START HERE[/bold green] - this will guide you step-by-step " + "to generate your [purple]nebari-config.yaml[/purple]. " + "It is an [i]alternative[/i] to passing the options listed below." 
+) + + +class GitRepoEnum(str, enum.Enum): + github = "github.com" + gitlab = "gitlab.com" + + +class InitInputs(schema.Base): + cloud_provider: ProviderEnum = ProviderEnum.local + project_name: schema.letter_dash_underscore_pydantic = "" + domain_name: typing.Optional[str] = None + namespace: typing.Optional[schema.letter_dash_underscore_pydantic] = "dev" + auth_provider: AuthenticationEnum = AuthenticationEnum.password + auth_auto_provision: bool = False + repository: typing.Union[str, None] = None + repository_auto_provision: bool = False + ci_provider: CiEnum = CiEnum.none + terraform_state: TerraformStateEnum = TerraformStateEnum.remote + kubernetes_version: typing.Union[str, None] = None + ssl_cert_email: typing.Union[schema.email_pydantic, None] = None + disable_prompt: bool = False + output: pathlib.Path = pathlib.Path("nebari-config.yaml") + def enum_to_list(enum_cls): - return [e.value.lower() for e in enum_cls] + return [e.value for e in enum_cls] -def handle_init(inputs: InitInputs): +def handle_init(inputs: InitInputs, config_schema: BaseModel): """ Take the inputs from the `nebari init` command, render the config and write it to a local yaml file. """ - if NEBARI_IMAGE_TAG: - print( - f"Modifying the image tags for the `default_images`, setting tags to: {NEBARI_IMAGE_TAG}" - ) - - if NEBARI_DASK_VERSION: - print( - f"Modifying the version of the `nebari_dask` package, setting version to: {NEBARI_DASK_VERSION}" - ) # this will force the `set_kubernetes_version` to grab the latest version if inputs.kubernetes_version == "latest": @@ -81,18 +100,119 @@ def handle_init(inputs: InitInputs): ) try: - with open("nebari-config.yaml", "x") as f: - yaml.dump(config, f) + write_configuration( + inputs.output, + config, + mode="x", + ) except FileExistsError: raise ValueError( "A nebari-config.yaml file already exists. Please move or delete it and try again." ) -def check_cloud_provider_creds(ctx: typer.Context, cloud_provider: str): +def check_repository_creds(ctx: typer.Context, git_provider: str): + """Validate the necessary Git provider (GitHub) credentials are set.""" + + if ( + git_provider == GitRepoEnum.github.value.lower() + and not os.environ.get("GITHUB_USERNAME") + or not os.environ.get("GITHUB_TOKEN") + ): + os.environ["GITHUB_USERNAME"] = typer.prompt( + "Paste your GITHUB_USERNAME", + hide_input=True, + ) + os.environ["GITHUB_TOKEN"] = typer.prompt( + "Paste your GITHUB_TOKEN", + hide_input=True, + ) + + +def typer_validate_regex(regex: str, error_message: str = None): + def callback(value): + if value is None: + return value + + if re.fullmatch(regex, value): + return value + message = error_message or f"Does not match {regex}" + raise typer.BadParameter(message) + + return callback + + +def questionary_validate_regex(regex: str, error_message: str = None): + def callback(value): + if re.fullmatch(regex, value): + return True + + message = error_message or f"Invalid input. 
Does not match {regex}" + return message + + return callback + + +def check_auth_provider_creds(ctx: typer.Context, auth_provider: str): + """Validate the the necessary auth provider credentials have been set as environment variables.""" + if ctx.params.get("disable_prompt"): + return auth_provider.lower() + + auth_provider = auth_provider.lower() + + # Auth0 + if auth_provider == AuthenticationEnum.auth0.value.lower() and ( + not os.environ.get("AUTH0_CLIENT_ID") + or not os.environ.get("AUTH0_CLIENT_SECRET") + or not os.environ.get("AUTH0_DOMAIN") + ): + rich.print( + MISSING_CREDS_TEMPLATE.format( + provider="Auth0", link_to_docs=CREATE_AUTH0_CREDS + ) + ) + + os.environ["AUTH0_CLIENT_ID"] = typer.prompt( + "Paste your AUTH0_CLIENT_ID", + hide_input=True, + ) + os.environ["AUTH0_CLIENT_SECRET"] = typer.prompt( + "Paste your AUTH0_CLIENT_SECRET", + hide_input=True, + ) + os.environ["AUTH0_DOMAIN"] = typer.prompt( + "Paste your AUTH0_DOMAIN", + hide_input=True, + ) + + # GitHub + elif auth_provider == AuthenticationEnum.github.value.lower() and ( + not os.environ.get("GITHUB_CLIENT_ID") + or not os.environ.get("GITHUB_CLIENT_SECRET") + ): + rich.print( + MISSING_CREDS_TEMPLATE.format( + provider="GitHub OAuth App", link_to_docs=CREATE_GITHUB_OAUTH_CREDS + ) + ) + + os.environ["GITHUB_CLIENT_ID"] = typer.prompt( + "Paste your GITHUB_CLIENT_ID", + hide_input=True, + ) + os.environ["GITHUB_CLIENT_SECRET"] = typer.prompt( + "Paste your GITHUB_CLIENT_SECRET", + hide_input=True, + ) + + return auth_provider + + +def check_cloud_provider_creds(ctx: typer.Context, cloud_provider: ProviderEnum): """Validate that the necessary cloud credentials have been set as environment variables.""" + if ctx.params.get("disable_prompt"): - return cloud_provider + return cloud_provider.lower() cloud_provider = cloud_provider.lower() @@ -194,94 +314,127 @@ def check_cloud_provider_creds(ctx: typer.Context, cloud_provider: str): return cloud_provider -def check_auth_provider_creds(ctx: typer.Context, auth_provider: str): - """Validate the the necessary auth provider credentials have been set as environment variables.""" - if ctx.params.get("disable_prompt"): - return auth_provider - - auth_provider = auth_provider.lower() - - # Auth0 - if auth_provider == AuthenticationEnum.auth0.value.lower() and ( - not os.environ.get("AUTH0_CLIENT_ID") - or not os.environ.get("AUTH0_CLIENT_SECRET") - or not os.environ.get("AUTH0_DOMAIN") +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command() + def init( + cloud_provider: ProviderEnum = typer.Argument( + ProviderEnum.local, + help=f"options: {enum_to_list(ProviderEnum)}", + callback=check_cloud_provider_creds, + is_eager=True, + ), + # Although this unused below, the functionality is contained in the callback. Thus, + # this attribute cannot be removed. 
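Both validator factories above close over the same pattern (`schema.namestr_regex`) and simply report whether `re.fullmatch` succeeds: the typer variant raises `BadParameter`, the questionary variant returns an error string. A quick standalone illustration of which names that pattern accepts:

```python
import re

# same pattern as nebari.schema.namestr_regex
namestr_regex = r"^[A-Za-z][A-Za-z\-_]*[A-Za-z]$"

for candidate in ["myproject", "my-project", "data_lab", "x", "9lives", "ends-with-"]:
    # only the first three match: at least two characters, starting and ending with a letter
    print(f"{candidate!r:15} -> {bool(re.fullmatch(namestr_regex, candidate))}")
```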
+ guided_init: bool = typer.Option( + False, + help=GUIDED_INIT_MSG, + callback=guided_init_wizard, + is_eager=True, + ), + project_name: str = typer.Option( + ..., + "--project-name", + "--project", + "-p", + callback=typer_validate_regex( + schema.namestr_regex, + "Project name must begin with a letter and consist of letters, numbers, dashes, or underscores.", + ), + ), + domain_name: typing.Optional[str] = typer.Option( + None, + "--domain-name", + "--domain", + "-d", + ), + namespace: str = typer.Option( + "dev", + callback=typer_validate_regex( + schema.namestr_regex, + "Namespace must begin with a letter and consist of letters, numbers, dashes, or underscores.", + ), + ), + auth_provider: AuthenticationEnum = typer.Option( + AuthenticationEnum.password, + help=f"options: {enum_to_list(AuthenticationEnum)}", + callback=check_auth_provider_creds, + ), + auth_auto_provision: bool = typer.Option( + False, + ), + repository: GitRepoEnum = typer.Option( + None, + help=f"options: {enum_to_list(GitRepoEnum)}", + ), + repository_auto_provision: bool = typer.Option( + False, + ), + ci_provider: CiEnum = typer.Option( + CiEnum.none, + help=f"options: {enum_to_list(CiEnum)}", + ), + terraform_state: TerraformStateEnum = typer.Option( + TerraformStateEnum.remote, + help=f"options: {enum_to_list(TerraformStateEnum)}", + ), + kubernetes_version: str = typer.Option( + "latest", + ), + ssl_cert_email: str = typer.Option( + None, + callback=typer_validate_regex( + schema.email_regex, + f"Email must be valid and match the regex {schema.email_regex}", + ), + ), + disable_prompt: bool = typer.Option( + False, + is_eager=True, + ), + output: str = typer.Option( + pathlib.Path("nebari-config.yaml"), + "--output", + "-o", + help="Output file path for the rendered config file.", + ), ): - rich.print( - MISSING_CREDS_TEMPLATE.format( - provider="Auth0", link_to_docs=CREATE_AUTH0_CREDS - ) - ) - - os.environ["AUTH0_CLIENT_ID"] = typer.prompt( - "Paste your AUTH0_CLIENT_ID", - hide_input=True, - ) - os.environ["AUTH0_CLIENT_SECRET"] = typer.prompt( - "Paste your AUTH0_CLIENT_SECRET", - hide_input=True, - ) - os.environ["AUTH0_DOMAIN"] = typer.prompt( - "Paste your AUTH0_DOMAIN", - hide_input=True, - ) - - # GitHub - elif auth_provider == AuthenticationEnum.github.value.lower() and ( - not os.environ.get("GITHUB_CLIENT_ID") - or not os.environ.get("GITHUB_CLIENT_SECRET") - ): - rich.print( - MISSING_CREDS_TEMPLATE.format( - provider="GitHub OAuth App", link_to_docs=CREATE_GITHUB_OAUTH_CREDS - ) - ) - - os.environ["GITHUB_CLIENT_ID"] = typer.prompt( - "Paste your GITHUB_CLIENT_ID", - hide_input=True, - ) - os.environ["GITHUB_CLIENT_SECRET"] = typer.prompt( - "Paste your GITHUB_CLIENT_SECRET", - hide_input=True, - ) + """ + Create and initialize your [purple]nebari-config.yaml[/purple] file. - return auth_provider + This command will create and initialize your [purple]nebari-config.yaml[/purple] :sparkles: + This file contains all your Nebari cluster configuration details and, + is used as input to later commands such as [green]nebari render[/green], [green]nebari deploy[/green], etc. -def check_project_name(ctx: typer.Context, project_name: str): - """Validate the project_name is acceptable. Depends on `cloud_provider`.""" - project_name_convention( - project_name.lower(), {"provider": ctx.params["cloud_provider"]} - ) + If you're new to Nebari, we recommend you use the Guided Init wizard. 
+ To get started simply run: - return project_name + [green]nebari init --guided-init[/green] + """ + inputs = InitInputs() -def check_ssl_cert_email(ctx: typer.Context, ssl_cert_email: str): - """Validate the email used for SSL cert is in a valid format.""" - if ssl_cert_email and not re.match("^[^ @]+@[^ @]+\\.[^ @]+$", ssl_cert_email): - raise ValueError("ssl-cert-email should be a valid email address") + inputs.cloud_provider = cloud_provider + inputs.project_name = project_name + inputs.domain_name = domain_name + inputs.namespace = namespace + inputs.auth_provider = auth_provider + inputs.auth_auto_provision = auth_auto_provision + inputs.repository = repository + inputs.repository_auto_provision = repository_auto_provision + inputs.ci_provider = ci_provider + inputs.terraform_state = terraform_state + inputs.kubernetes_version = kubernetes_version + inputs.ssl_cert_email = ssl_cert_email + inputs.disable_prompt = disable_prompt + inputs.output = output - return ssl_cert_email + from nebari.plugins import nebari_plugin_manager + handle_init(inputs, config_schema=nebari_plugin_manager.config_schema) -def check_repository_creds(ctx: typer.Context, git_provider: str): - """Validate the necessary Git provider (GitHub) credentials are set.""" - - if ( - git_provider == GitRepoEnum.github.value.lower() - and not os.environ.get("GITHUB_USERNAME") - or not os.environ.get("GITHUB_TOKEN") - ): - os.environ["GITHUB_USERNAME"] = typer.prompt( - "Paste your GITHUB_USERNAME", - hide_input=True, - ) - os.environ["GITHUB_TOKEN"] = typer.prompt( - "Paste your GITHUB_TOKEN", - hide_input=True, - ) + nebari_plugin_manager.read_config(output) def guided_init_wizard(ctx: typer.Context, guided_init: str): @@ -291,7 +444,7 @@ def guided_init_wizard(ctx: typer.Context, guided_init: str): qmark = " " disable_checks = os.environ.get("NEBARI_DISABLE_INIT_CHECKS", False) - if Path("nebari-config.yaml").exists(): + if pathlib.Path("nebari-config.yaml").exists(): raise ValueError( "A nebari-config.yaml file already exists. Please move or delete it and try again." ) @@ -359,24 +512,23 @@ def guided_init_wizard(ctx: typer.Context, guided_init: str): inputs.project_name = questionary.text( "What project name would you like to use?", qmark=qmark, - validate=lambda text: True if len(text) > 0 else "Please enter a value", + validate=questionary_validate_regex(schema.namestr_regex), ).unsafe_ask() - if not disable_checks: - check_project_name(ctx, inputs.project_name) - # DOMAIN NAME rich.print( ( - "\n\n 🪴 Great! Now you need to provide a valid domain name (i.e. the URL) to access your Nebri instance. " - "This should be a domain that you own.\n\n" + "\n\n 🪴 Great! Now you can provide a valid domain name (i.e. the URL) to access your Nebri instance. " + "This should be a domain that you own. Default if unspecified is the IP of the load balancer.\n\n" ) ) - inputs.domain_name = questionary.text( - "What domain name would you like to use?", - qmark=qmark, - validate=lambda text: True if len(text) > 0 else "Please enter a value", - ).unsafe_ask() + inputs.domain_name = ( + questionary.text( + "What domain name would you like to use?", + qmark=qmark, + ).unsafe_ask() + or None + ) # AUTH PROVIDER rich.print( @@ -466,27 +618,31 @@ def guided_init_wizard(ctx: typer.Context, guided_init: str): inputs.ci_provider = CiEnum.gitlab_ci.value.lower() # SSL CERTIFICATE - rich.print( - ( - "\n\n 🪴 This next section is [italic]optional[/italic] but recommended. 
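Because the `init` command above is registered through the hook, the same code path can also be exercised programmatically. A sketch using typer's test runner, assuming `create_cli()` returns the assembled Typer application (as its use in `nebari.__main__` suggests) and that writing a config file into the working directory is acceptable:

```python
from typer.testing import CliRunner

from _nebari.cli import create_cli

runner = CliRunner()
result = runner.invoke(
    create_cli(),
    ["init", "local", "--project", "demo", "--disable-prompt", "--output", "demo-config.yaml"],
)
print(result.exit_code)  # 0 on success; demo-config.yaml is written in the working directory
```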
If you want your Nebari domain to use a Let's Encrypt SSL certificate, " - "all we need is an email address from you.\n\n" + if inputs.domain_name: + rich.print( + ( + "\n\n 🪴 This next section is [italic]optional[/italic] but recommended. If you want your Nebari domain to use a Let's Encrypt SSL certificate, " + "all we need is an email address from you.\n\n" + ) ) - ) - ssl_cert = questionary.confirm( - "Would you like to add a Let's Encrypt SSL certificate to your domain?", - default=False, - qmark=qmark, - auto_enter=False, - ).unsafe_ask() - - if ssl_cert: - inputs.ssl_cert_email = questionary.text( - "Which email address should Let's Encrypt associate the certificate with?", + ssl_cert = questionary.confirm( + "Would you like to add a Let's Encrypt SSL certificate to your domain?", + default=False, qmark=qmark, + auto_enter=False, ).unsafe_ask() - if not disable_checks: - check_ssl_cert_email(ctx, ssl_cert_email=inputs.ssl_cert_email) + if ssl_cert: + inputs.ssl_cert_email = questionary.text( + "Which email address should Let's Encrypt associate the certificate with?", + qmark=qmark, + ).unsafe_ask() + + if not disable_checks: + typer_validate_regex( + schema.email_regex, + f"Email must be valid and match the regex {schema.email_regex}", + ) # ADVANCED FEATURES rich.print( @@ -523,7 +679,11 @@ def guided_init_wizard(ctx: typer.Context, guided_init: str): qmark=qmark, ).unsafe_ask() - handle_init(inputs) + from nebari.plugins import nebari_plugin_manager + + config_schema = nebari_plugin_manager.config_schema + + handle_init(inputs, config_schema=config_schema) rich.print( ( diff --git a/src/_nebari/subcommands/keycloak.py b/src/_nebari/subcommands/keycloak.py new file mode 100644 index 000000000..8f57d3417 --- /dev/null +++ b/src/_nebari/subcommands/keycloak.py @@ -0,0 +1,85 @@ +import json +import pathlib +from typing import Tuple + +import typer + +from _nebari.config import read_configuration +from _nebari.keycloak import do_keycloak, export_keycloak_users +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + app_keycloak = typer.Typer( + add_completion=False, + no_args_is_help=True, + rich_markup_mode="rich", + context_settings={"help_option_names": ["-h", "--help"]}, + ) + + cli.add_typer( + app_keycloak, + name="keycloak", + help="Interact with the Nebari Keycloak identity and access management tool.", + rich_help_panel="Additional Commands", + ) + + @app_keycloak.command(name="adduser") + def add_user( + add_users: Tuple[str, str] = typer.Option( + ..., "--user", help="Provide both: " + ), + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ), + ): + """Add a user to Keycloak. 
User will be automatically added to the [italic]analyst[/italic] group.""" + from nebari.plugins import nebari_plugin_manager + + args = ["adduser", add_users[0], add_users[1]] + config_schema = nebari_plugin_manager.config_schema + config = read_configuration(config_filename, config_schema) + do_keycloak(config, *args) + + @app_keycloak.command(name="listusers") + def list_users( + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ) + ): + """List the users in Keycloak.""" + from nebari.plugins import nebari_plugin_manager + + args = ["listusers"] + config_schema = nebari_plugin_manager.config_schema + config = read_configuration(config_filename, config_schema) + do_keycloak(config, *args) + + @app_keycloak.command(name="export-users") + def export_users( + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ), + realm: str = typer.Option( + "nebari", + "--realm", + help="realm from which users are to be exported", + ), + ): + """Export the users in Keycloak.""" + from nebari.plugins import nebari_plugin_manager + + config_schema = nebari_plugin_manager.config_schema + config = read_configuration(config_filename, config_schema=config_schema) + r = export_keycloak_users(config, realm=realm) + print(json.dumps(r, indent=4)) diff --git a/src/_nebari/subcommands/render.py b/src/_nebari/subcommands/render.py new file mode 100644 index 000000000..9c260061f --- /dev/null +++ b/src/_nebari/subcommands/render.py @@ -0,0 +1,42 @@ +import pathlib + +import typer + +from _nebari.config import read_configuration +from _nebari.render import render_template +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command(rich_help_panel="Additional Commands") + def render( + ctx: typer.Context, + output_directory: pathlib.Path = typer.Option( + "./", + "-o", + "--output", + help="output directory", + ), + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration yaml file path", + ), + dry_run: bool = typer.Option( + False, + "--dry-run", + help="simulate rendering files without actually writing or updating any files", + ), + ): + """ + Dynamically render the Terraform scripts and other files from your [purple]nebari-config.yaml[/purple] file. 
+ """ + from nebari.plugins import nebari_plugin_manager + + stages = nebari_plugin_manager.ordered_stages + config_schema = nebari_plugin_manager.config_schema + + config = read_configuration(config_filename, config_schema=config_schema) + render_template(output_directory, config, stages, dry_run=dry_run) diff --git a/src/_nebari/subcommands/support.py b/src/_nebari/subcommands/support.py new file mode 100644 index 000000000..93b185fa2 --- /dev/null +++ b/src/_nebari/subcommands/support.py @@ -0,0 +1,84 @@ +import pathlib +from zipfile import ZipFile + +import kubernetes.client +import kubernetes.config +import typer + +from _nebari.config import read_configuration +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command(rich_help_panel="Additional Commands") + def support( + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ), + output: str = typer.Option( + "./nebari-support-logs.zip", + "-o", + "--output", + help="output filename", + ), + ): + """ + Support tool to write all Kubernetes logs locally and compress them into a zip file. + + The Nebari team recommends k9s to manage and inspect the state of the cluster. + However, this command occasionally helpful for debugging purposes should the logs need to be shared. + """ + from nebari.plugins import nebari_plugin_manager + + kubernetes.config.kube_config.load_kube_config() + + v1 = kubernetes.client.CoreV1Api() + + config_schema = nebari_plugin_manager.config_schema + namespace = read_configuration(config_filename, config_schema).namespace + + pods = v1.list_namespaced_pod(namespace=namespace) + + for pod in pods.items: + pathlib.Path(f"./log/{namespace}").mkdir(parents=True, exist_ok=True) + path = pathlib.Path(f"./log/{namespace}/{pod.metadata.name}.txt") + with path.open(mode="wt") as file: + try: + file.write( + "%s\t%s\t%s\n" + % ( + pod.status.pod_ip, + namespace, + pod.metadata.name, + ) + ) + + # some pods are running multiple containers + containers = [ + _.name if len(pod.spec.containers) > 1 else None + for _ in pod.spec.containers + ] + + for container in containers: + if container is not None: + file.write(f"Container: {container}\n") + file.write( + v1.read_namespaced_pod_log( + name=pod.metadata.name, + namespace=namespace, + container=container, + ) + ) + + except client.exceptions.ApiException as e: + file.write("%s not available" % pod.metadata.name) + raise e + + with ZipFile(output, "w") as zip: + for file in list(pathlib.Path(f"./log/{namespace}").glob("*.txt")): + print(file) + zip.write(file) diff --git a/src/_nebari/subcommands/upgrade.py b/src/_nebari/subcommands/upgrade.py new file mode 100644 index 000000000..53d6cfabf --- /dev/null +++ b/src/_nebari/subcommands/upgrade.py @@ -0,0 +1,37 @@ +import pathlib + +import typer + +from _nebari.upgrade import do_upgrade +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command(rich_help_panel="Additional Commands") + def upgrade( + config_filename: pathlib.Path = typer.Option( + ..., + "-c", + "--config", + help="nebari configuration file path", + ), + attempt_fixes: bool = typer.Option( + False, + "--attempt-fixes", + help="Attempt to fix the config for any incompatibilities between your old and new Nebari versions.", + ), + ): + """ + Upgrade your [purple]nebari-config.yaml[/purple]. + + Upgrade your [purple]nebari-config.yaml[/purple] after an nebari upgrade. 
If necessary, prompts users to perform manual upgrade steps required for the deploy process. + + See the project [green]RELEASE.md[/green] for details. + """ + if not config_filename.is_file(): + raise ValueError( + f"passed in configuration filename={config_filename} must exist" + ) + + do_upgrade(config_filename, attempt_fixes=attempt_fixes) diff --git a/src/_nebari/subcommands/validate.py b/src/_nebari/subcommands/validate.py new file mode 100644 index 000000000..9cf7448f2 --- /dev/null +++ b/src/_nebari/subcommands/validate.py @@ -0,0 +1,44 @@ +import pathlib + +import pydantic +import typer +from rich import print + +from nebari.hookspecs import hookimpl + + +@hookimpl +def nebari_subcommand(cli: typer.Typer): + @cli.command(rich_help_panel="Additional Commands") + def validate( + config_filename: pathlib.Path = typer.Option( + ..., + "--config", + "-c", + help="nebari configuration yaml file path, please pass in as -c/--config flag", + ), + enable_commenting: bool = typer.Option( + False, "--enable-commenting", help="Toggle PR commenting on GitHub Actions" + ), + ): + """ + Validate the values in the [purple]nebari-config.yaml[/purple] file are acceptable. + """ + if enable_commenting: + # for PR's only + # comment_on_pr(config) + pass + else: + from nebari.plugins import nebari_plugin_manager + + try: + nebari_plugin_manager.read_config(config_filename) + print( + "[bold purple]Successfully validated configuration.[/bold purple]" + ) + except pydantic.ValidationError as e: + print( + f"[bold red]ERROR validating configuration {config_filename.absolute()}[/bold red]" + ) + print(str(e)) + typer.Abort() diff --git a/src/_nebari/template/stages/07-kubernetes-services/variables.tf b/src/_nebari/template/stages/07-kubernetes-services/variables.tf deleted file mode 100644 index fd96f4fd0..000000000 --- a/src/_nebari/template/stages/07-kubernetes-services/variables.tf +++ /dev/null @@ -1,78 +0,0 @@ -variable "name" { - description = "Prefix name to assign to kubernetes resources" - type = string -} - -variable "environment" { - description = "Kubernetes namespace to create resources within" - type = string -} - -variable "endpoint" { - description = "Endpoint for services" - type = string -} - -variable "realm_id" { - description = "Keycloak realm id for creating clients" - type = string -} - -variable "node_groups" { - description = "Node group selectors for kubernetes resources" - type = map(object({ - key = string - value = string - })) -} - -variable "jupyterhub-logout-redirect-url" { - description = "Next redirect destination following a Keycloak logout" - type = string - default = "" -} - -variable "jupyterhub-hub-extraEnv" { - description = "Extracted overrides to merge with jupyterhub.hub.extraEnv" - type = string - default = "[]" -} - -variable "conda-store-default-namespace" { - description = "Default conda-store namespace name" - type = string - default = "nebari-git" -} - -variable "conda-store-service-token-scopes" { - description = "Map of services tokens and scopes for conda-store" - type = map(any) - default = { - "cdsdashboards" = { - "primary_namespace" : "cdsdashboards", - "role_bindings" : { - "*/*" : ["viewer"], - } - } - } -} - - -variable "idle-culler-settings" { - description = "Idle culler timeout settings (in minutes)" - type = any -} - -# allows us to merge variables set in the nebari-config.yaml with the default values below -locals { - default-idle-culler-settings = { - kernel_cull_busy = false - kernel_cull_connected = true - kernel_cull_idle_timeout = 15 - 
kernel_cull_interval = 5 - server_shutdown_no_activity_timeout = 15 - terminal_cull_inactive_timeout = 15 - terminal_cull_interval = 5 - } - idle-culler-settings = merge(local.default-idle-culler-settings, var.idle-culler-settings) -} diff --git a/src/_nebari/template/stages/__init__.py b/src/_nebari/template/stages/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/_nebari/upgrade.py b/src/_nebari/upgrade.py index 95eb02bb5..6cb5b098a 100644 --- a/src/_nebari/upgrade.py +++ b/src/_nebari/upgrade.py @@ -10,9 +10,10 @@ from pydantic.error_wrappers import ValidationError from rich.prompt import Prompt -from .schema import is_version_accepted, verify -from .utils import backup_config_file, load_yaml, yaml -from .version import __version__, rounded_ver_parse +from _nebari.config import backup_configuration +from _nebari.utils import load_yaml, yaml +from _nebari.version import __version__, rounded_ver_parse +from nebari import schema logger = logging.getLogger(__name__) @@ -31,13 +32,15 @@ def do_upgrade(config_filename, attempt_fixes=False): return try: - verify(config) + from nebari.plugins import nebari_plugin_manager + + nebari_plugin_manager.read_config(config_filename) rich.print( f"Your config file [purple]{config_filename}[/purple] appears to be already up-to-date for Nebari version [green]{__version__}[/green]" ) return except (ValidationError, ValueError) as e: - if is_version_accepted(config.get("nebari_version", "")): + if schema.is_version_accepted(config.get("nebari_version", "")): # There is an unrelated validation problem rich.print( f"Your config file [purple]{config_filename}[/purple] appears to be already up-to-date for Nebari version [green]{__version__}[/green] but there is another validation error.\n" @@ -52,7 +55,7 @@ def do_upgrade(config_filename, attempt_fixes=False): ) # Backup old file - backup_config_file(config_filename, f".{start_version or 'old'}") + backup_configuration(config_filename, f".{start_version or 'old'}") with config_filename.open("wt") as f: yaml.dump(config, f) @@ -348,7 +351,7 @@ def _version_specific_upgrade( if k not in {"users", "admin"} ] - backup_config_file(realm_import_filename) + backup_configuration(realm_import_filename) with realm_import_filename.open("wt") as f: json.dump(realm, f, indent=2) diff --git a/src/_nebari/utils.py b/src/_nebari/utils.py index 4ca07f82b..154f2faf7 100644 --- a/src/_nebari/utils.py +++ b/src/_nebari/utils.py @@ -2,40 +2,24 @@ import functools import os import re +import secrets import signal +import string import subprocess import sys import threading import time +import warnings from pathlib import Path from typing import Dict, List from ruamel.yaml import YAML -from _nebari.constants import DEFAULT_NEBARI_DASK_VERSION, DEFAULT_NEBARI_IMAGE_TAG -from _nebari.provider.cloud import ( - amazon_web_services, - azure_cloud, - digital_ocean, - google_cloud, -) - # environment variable overrides -NEBARI_K8S_VERSION = os.getenv("NEBARI_K8S_VERSION", None) NEBARI_GH_BRANCH = os.getenv("NEBARI_GH_BRANCH", None) -NEBARI_IMAGE_TAG = os.getenv("NEBARI_IMAGE_TAG", None) -NEBARI_DASK_VERSION = os.getenv("NEBARI_DASK_VERSION", None) - -DO_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-do" -AWS_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-aws" -GCP_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-gcp" -AZURE_ENV_DOCS = "https://www.nebari.dev/docs/how-tos/nebari-azure" CONDA_FORGE_CHANNEL_DATA_URL = "https://conda.anaconda.org/conda-forge/channeldata.json" -# Regex for 
suitable project names -namestr_regex = r"^[A-Za-z][A-Za-z\-_]*[A-Za-z]$" - # Create a ruamel object with our favored config, for universal use yaml = YAML() yaml.preserve_quotes = True @@ -112,70 +96,6 @@ def kill_process(): ) # Should already have finished because we have drained stdout -def check_cloud_credentials(config): - if config["provider"] == "gcp": - for variable in {"GOOGLE_CREDENTIALS"}: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {GCP_ENV_DOCS}""" - ) - elif config["provider"] == "azure": - for variable in { - "ARM_CLIENT_ID", - "ARM_CLIENT_SECRET", - "ARM_SUBSCRIPTION_ID", - "ARM_TENANT_ID", - }: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {AZURE_ENV_DOCS}""" - ) - elif config["provider"] == "aws": - for variable in { - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - }: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {AWS_ENV_DOCS}""" - ) - elif config["provider"] == "do": - for variable in { - "AWS_ACCESS_KEY_ID", - "AWS_SECRET_ACCESS_KEY", - "SPACES_ACCESS_KEY_ID", - "SPACES_SECRET_ACCESS_KEY", - "DIGITALOCEAN_TOKEN", - }: - if variable not in os.environ: - raise ValueError( - f"""Missing the following required environment variable: {variable}\n - Please see the documentation for more information: {DO_ENV_DOCS}""" - ) - - if os.environ["AWS_ACCESS_KEY_ID"] != os.environ["SPACES_ACCESS_KEY_ID"]: - raise ValueError( - f"""The environment variables AWS_ACCESS_KEY_ID and SPACES_ACCESS_KEY_ID must be equal\n - See {DO_ENV_DOCS} for more information""" - ) - - if ( - os.environ["AWS_SECRET_ACCESS_KEY"] - != os.environ["SPACES_SECRET_ACCESS_KEY"] - ): - raise ValueError( - f"""The environment variables AWS_SECRET_ACCESS_KEY and SPACES_SECRET_ACCESS_KEY must be equal\n - See {DO_ENV_DOCS} for more information""" - ) - elif config["provider"] in ["local", "existing"]: - pass - else: - raise ValueError("Cloud Provider configuration not supported") - - def load_yaml(config_filename: Path): """ Return yaml dict containing config loaded from config_filename. 
@@ -186,85 +106,6 @@ def load_yaml(config_filename: Path): return config -def backup_config_file(filename: Path, extrasuffix: str = ""): - if not filename.exists(): - return - - # Backup old file - backup_filename = Path(f"{filename}{extrasuffix}.backup") - - if backup_filename.exists(): - i = 1 - while True: - next_backup_filename = Path(f"{backup_filename}~{i}") - if not next_backup_filename.exists(): - backup_filename = next_backup_filename - break - i = i + 1 - - filename.rename(backup_filename) - print(f"Backing up {filename} as {backup_filename}") - - -def set_kubernetes_version( - config, kubernetes_version, cloud_provider, grab_latest_version=True -): - cloud_provider_dict = { - "aws": { - "full_name": "amazon_web_services", - "k8s_version_checker_func": amazon_web_services.kubernetes_versions, - }, - "azure": { - "full_name": "azure", - "k8s_version_checker_func": azure_cloud.kubernetes_versions, - }, - "do": { - "full_name": "digital_ocean", - "k8s_version_checker_func": digital_ocean.kubernetes_versions, - }, - "gcp": { - "full_name": "google_cloud_platform", - "k8s_version_checker_func": google_cloud.kubernetes_versions, - }, - } - cloud_full_name = cloud_provider_dict[cloud_provider]["full_name"] - func = cloud_provider_dict[cloud_provider]["k8s_version_checker_func"] - cloud_config = config[cloud_full_name] - - def _raise_value_error(cloud_provider, k8s_versions): - raise ValueError( - f"\nInvalid `kubernetes-version` provided: {kubernetes_version}.\nPlease select from one of the following {cloud_provider.upper()} supported Kubernetes versions: {k8s_versions} or omit flag to use latest Kubernetes version available." - ) - - def _check_and_set_kubernetes_version( - kubernetes_version=kubernetes_version, - cloud_provider=cloud_provider, - cloud_config=cloud_config, - func=func, - ): - region = cloud_config["region"] - - # to avoid using cloud provider SDK - # set NEBARI_K8S_VERSION environment variable - if not NEBARI_K8S_VERSION: - k8s_versions = func(region) - else: - k8s_versions = [NEBARI_K8S_VERSION] - - if kubernetes_version: - if kubernetes_version in k8s_versions: - cloud_config["kubernetes_version"] = kubernetes_version - else: - _raise_value_error(cloud_provider, k8s_versions) - elif grab_latest_version: - cloud_config["kubernetes_version"] = k8s_versions[-1] - else: - # grab oldest version - cloud_config["kubernetes_version"] = k8s_versions[0] - - return _check_and_set_kubernetes_version() - - @contextlib.contextmanager def modified_environ(*remove: List[str], **update: Dict[str, str]): """ @@ -297,44 +138,6 @@ def modified_environ(*remove: List[str], **update: Dict[str, str]): [env.pop(k) for k in remove_after] -@contextlib.contextmanager -def kubernetes_provider_context(kubernetes_credentials: Dict[str, str]): - credential_mapping = { - "config_path": "KUBE_CONFIG_PATH", - "config_context": "KUBE_CTX", - "username": "KUBE_USER", - "password": "KUBE_PASSWORD", - "client_certificate": "KUBE_CLIENT_CERT_DATA", - "client_key": "KUBE_CLIENT_KEY_DATA", - "cluster_ca_certificate": "KUBE_CLUSTER_CA_CERT_DATA", - "host": "KUBE_HOST", - "token": "KUBE_TOKEN", - } - - credentials = { - credential_mapping[k]: v - for k, v in kubernetes_credentials.items() - if v is not None - } - with modified_environ(**credentials): - yield - - -@contextlib.contextmanager -def keycloak_provider_context(keycloak_credentials: Dict[str, str]): - credential_mapping = { - "client_id": "KEYCLOAK_CLIENT_ID", - "url": "KEYCLOAK_URL", - "username": "KEYCLOAK_USER", - "password": "KEYCLOAK_PASSWORD", - 
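The kubernetes/keycloak provider context managers removed here were thin wrappers that mapped credential dicts onto environment variables via `modified_environ`, which stays in `_nebari.utils`. A small usage sketch of that primitive (variable names and paths are placeholders):

```python
import os

from _nebari.utils import modified_environ

# set KUBE_CONFIG_PATH (and drop SOME_STALE_VAR if present) only inside the block;
# the previous environment is restored on exit
with modified_environ("SOME_STALE_VAR", KUBE_CONFIG_PATH="/tmp/kubeconfig"):
    print(os.environ["KUBE_CONFIG_PATH"])
print(os.environ.get("KUBE_CONFIG_PATH"))  # back to the original value, or None if it was unset
```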
"realm": "KEYCLOAK_REALM", - } - - credentials = {credential_mapping[k]: v for k, v in keycloak_credentials.items()} - with modified_environ(**credentials): - yield - - def deep_merge(*args): """Deep merge multiple dictionaries. @@ -355,7 +158,9 @@ def deep_merge(*args): >>> print(deep_merge(value_1, value_2)) {'m': 1, 'e': {'f': {'g': {}, 'h': 1}}, 'b': {'d': 2, 'c': 1, 'z': [5, 6, 7]}, 'a': [1, 2, 3, 4]} """ - if len(args) == 1: + if len(args) == 0: + return {} + elif len(args) == 1: return args[0] elif len(args) > 2: return functools.reduce(deep_merge, args, {}) @@ -378,22 +183,86 @@ def deep_merge(*args): return d1 -def set_docker_image_tag() -> str: - """Set docker image tag for `jupyterlab`, `jupyterhub`, and `dask-worker`.""" +# https://github.com/minrk/escapism/blob/master/escapism.py +def escape_string( + to_escape, + safe=set(string.ascii_letters + string.digits), + escape_char="_", + allow_collisions=False, +): + """Escape a string so that it only contains characters in a safe set. - if NEBARI_IMAGE_TAG: - return NEBARI_IMAGE_TAG + Characters outside the safe list will be escaped with _%x_, + where %x is the hex value of the character. - return DEFAULT_NEBARI_IMAGE_TAG + If `allow_collisions` is True, occurrences of `escape_char` + in the input will not be escaped. + In this case, `unescape` cannot be used to reverse the transform + because occurrences of the escape char in the resulting string are ambiguous. + Only use this mode when: -def set_nebari_dask_version() -> str: - """Set version of `nebari-dask` meta package.""" + 1. collisions cannot occur or do not matter, and + 2. unescape will never be called. - if NEBARI_DASK_VERSION: - return NEBARI_DASK_VERSION + .. versionadded: 1.0 + allow_collisions argument. + Prior to 1.0, behavior was the same as allow_collisions=False (default). - return DEFAULT_NEBARI_DASK_VERSION + """ + if sys.version_info >= (3,): + + def _ord(byte): + return byte + + def _bchr(n): + return bytes([n]) + + else: + _ord = ord + _bchr = chr + + def _escape_char(c, escape_char): + """Escape a single character""" + buf = [] + for byte in c.encode("utf8"): + buf.append(escape_char) + buf.append("%X" % _ord(byte)) + return "".join(buf) + + if isinstance(to_escape, bytes): + # always work on text + to_escape = to_escape.decode("utf8") + + if not isinstance(safe, set): + safe = set(safe) + + if allow_collisions: + safe.add(escape_char) + elif escape_char in safe: + warnings.warn( + "Escape character %r cannot be a safe character." + " Set allow_collisions=True if you want to allow ambiguous escaped strings." 
+ % escape_char, + RuntimeWarning, + stacklevel=2, + ) + safe.remove(escape_char) + + chars = [] + for c in to_escape: + if c in safe: + chars.append(c) + else: + chars.append(_escape_char(c, escape_char)) + + return "".join(chars) + + +def random_secure_string( + length: int = 16, chars: str = string.ascii_lowercase + string.digits +): + return "".join(secrets.choice(chars) for i in range(length)) def is_relative_to(self: Path, other: Path, /) -> bool: diff --git a/src/nebari/__main__.py b/src/nebari/__main__.py index 09030b4cb..b18eaf428 100644 --- a/src/nebari/__main__.py +++ b/src/nebari/__main__.py @@ -1,6 +1,10 @@ -import sys +from _nebari.cli import create_cli + + +def main(): + cli = create_cli() + cli() -from _nebari.cli.main import app as main if __name__ == "__main__": - main(sys.argv[1:]) + main() diff --git a/src/nebari/hookspecs.py b/src/nebari/hookspecs.py new file mode 100644 index 000000000..789dfe2d7 --- /dev/null +++ b/src/nebari/hookspecs.py @@ -0,0 +1,50 @@ +import contextlib +import pathlib +from typing import Any, Dict, List + +import pydantic +import typer +from pluggy import HookimplMarker, HookspecMarker + +from nebari import schema + +hookspec = HookspecMarker("nebari") +hookimpl = HookimplMarker("nebari") + + +class NebariStage: + name: str = None + priority: int = None + + input_schema: pydantic.BaseModel = None + output_schema: pydantic.BaseModel = None + + def __init__(self, output_directory: pathlib.Path, config: schema.Main): + self.output_directory = output_directory + self.config = config + + def render(self) -> Dict[str, str]: + return {} + + @contextlib.contextmanager + def deploy(self, stage_outputs: Dict[str, Dict[str, Any]]): + yield + + def check(self, stage_outputs: Dict[str, Dict[str, Any]]) -> bool: + pass + + @contextlib.contextmanager + def destroy( + self, stage_outputs: Dict[str, Dict[str, Any]], status: Dict[str, bool] + ): + yield + + +@hookspec +def nebari_stage() -> List[NebariStage]: + """Registers stages in nebari""" + + +@hookspec +def nebari_subcommand(cli: typer.Typer): + """Register Typer subcommand in nebari""" diff --git a/src/nebari/plugins.py b/src/nebari/plugins.py new file mode 100644 index 000000000..ca593347e --- /dev/null +++ b/src/nebari/plugins.py @@ -0,0 +1,129 @@ +import importlib +import itertools +import os +import re +import sys +import typing +from pathlib import Path + +import pluggy + +from nebari import hookspecs, schema + +DEFAULT_SUBCOMMAND_PLUGINS = [ + # subcommands + "_nebari.subcommands.info", + "_nebari.subcommands.init", + "_nebari.subcommands.dev", + "_nebari.subcommands.deploy", + "_nebari.subcommands.destroy", + "_nebari.subcommands.keycloak", + "_nebari.subcommands.render", + "_nebari.subcommands.support", + "_nebari.subcommands.upgrade", + "_nebari.subcommands.validate", +] + +DEFAULT_STAGES_PLUGINS = [ + # stages + "_nebari.stages.bootstrap", + "_nebari.stages.terraform_state", + "_nebari.stages.infrastructure", + "_nebari.stages.kubernetes_initialize", + "_nebari.stages.kubernetes_ingress", + "_nebari.stages.kubernetes_keycloak", + "_nebari.stages.kubernetes_keycloak_configuration", + "_nebari.stages.kubernetes_services", + "_nebari.stages.nebari_tf_extensions", +] + + +class NebariPluginManager: + plugin_manager = pluggy.PluginManager("nebari") + + exclude_default_stages: bool = False + exclude_stages: typing.List[str] = [] + + def __init__(self) -> None: + self.plugin_manager.add_hookspecs(hookspecs) + + if not hasattr(sys, "_called_from_test"): + # Only load plugins if not running tests + 
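`NebariStage` plus the `nebari_stage` hook is the contract every module in `DEFAULT_STAGES_PLUGINS` now implements. A hedged sketch of what an external stage plugin could look like; the class and file names are made up, and the hook returns the stage class itself, matching how the plugin manager later reads `name`, `priority`, and `input_schema` from each entry:

```python
# hypothetical plugin module registering an extra stage
from typing import Any, Dict, List

from nebari.hookspecs import NebariStage, hookimpl


class HelloWorldStage(NebariStage):
    name = "99-hello-world"
    priority = 99

    def render(self) -> Dict[str, str]:
        # files to write below the output directory, mirroring the path -> contents
        # shape returned by the stage functions deleted at the top of this diff
        return {"stages/99-hello-world/README.md": "managed by HelloWorldStage\n"}

    def check(self, stage_outputs: Dict[str, Dict[str, Any]]) -> bool:
        return True


@hookimpl
def nebari_stage() -> List[NebariStage]:
    return [HelloWorldStage]
```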
self.plugin_manager.load_setuptools_entrypoints("nebari") + + self.load_plugins(DEFAULT_SUBCOMMAND_PLUGINS) + + def load_plugins(self, plugins: typing.List[str]): + def _import_module_from_filename(plugin: str): + module_name = f"_nebari.stages._files.{plugin.replace(os.sep, '.')}" + spec = importlib.util.spec_from_file_location(module_name, plugin) + mod = importlib.util.module_from_spec(spec) + sys.modules[module_name] = mod + spec.loader.exec_module(mod) + return mod + + for plugin in plugins: + if plugin.endswith(".py"): + mod = _import_module_from_filename(plugin) + else: + mod = importlib.import_module(plugin) + + try: + self.plugin_manager.register(mod, plugin) + except ValueError: + # Pluin already registered + pass + + def get_available_stages(self): + if not self.exclude_default_stages: + self.load_plugins(DEFAULT_STAGES_PLUGINS) + + stages = itertools.chain.from_iterable(self.plugin_manager.hook.nebari_stage()) + + # order stages by priority + sorted_stages = sorted(stages, key=lambda s: s.priority) + + # filter out duplicate stages with same name (keep highest priority) + visited_stage_names = set() + filtered_stages = [] + for stage in reversed(sorted_stages): + if stage.name in visited_stage_names: + continue + filtered_stages.insert(0, stage) + visited_stage_names.add(stage.name) + + # filter out stages which match excluded stages + included_stages = [] + for stage in filtered_stages: + for exclude_stage in self.exclude_stages: + if re.fullmatch(exclude_stage, stage.name) is not None: + break + else: + included_stages.append(stage) + + return included_stages + + def read_config(self, config_path: typing.Union[str, Path], **kwargs): + if isinstance(config_path, str): + config_path = Path(config_path) + + if not config_path.exists(): + raise FileNotFoundError(f"Config file {config_path} not found") + + from _nebari.config import read_configuration + + return read_configuration(config_path, self.config_schema, **kwargs) + + @property + def ordered_stages(self): + return self.get_available_stages() + + @property + def config_schema(self): + classes = [schema.Main] + [ + _.input_schema for _ in self.ordered_stages if _.input_schema is not None + ] + return type("ConfigSchema", tuple(classes), {}) + + +nebari_plugin_manager = NebariPluginManager() diff --git a/src/nebari/schema.py b/src/nebari/schema.py new file mode 100644 index 000000000..b3a5c169a --- /dev/null +++ b/src/nebari/schema.py @@ -0,0 +1,94 @@ +import enum + +import pydantic +from ruamel.yaml import yaml_object + +from _nebari.utils import escape_string, yaml +from _nebari.version import __version__, rounded_ver_parse + +# Regex for suitable project names +namestr_regex = r"^[A-Za-z][A-Za-z\-_]*[A-Za-z]$" +letter_dash_underscore_pydantic = pydantic.constr(regex=namestr_regex) + +email_regex = "^[^ @]+@[^ @]+\\.[^ @]+$" +email_pydantic = pydantic.constr(regex=email_regex) + + +class Base(pydantic.BaseModel): + ... 
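The `config_schema` property builds a single pydantic model at runtime by inheriting from `schema.Main` plus every stage's `input_schema`. The same pattern in isolation, with toy models standing in for real stage schemas:

```python
import pydantic


class Main(pydantic.BaseModel):
    project_name: str


class StageAInputs(pydantic.BaseModel):
    stage_a_enabled: bool = False


class StageBInputs(pydantic.BaseModel):
    stage_b_replicas: int = 1


# one combined model, the same trick as type("ConfigSchema", tuple(classes), {})
ConfigSchema = type("ConfigSchema", (Main, StageAInputs, StageBInputs), {})

config = ConfigSchema(project_name="demo", stage_b_replicas=3)
print(config.dict())  # contains project_name, stage_a_enabled, and stage_b_replicas
```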
+ + class Config: + extra = "forbid" + validate_assignment = True + allow_population_by_field_name = True + + +@yaml_object(yaml) +class ProviderEnum(str, enum.Enum): + local = "local" + existing = "existing" + do = "do" + aws = "aws" + gcp = "gcp" + azure = "azure" + + @classmethod + def to_yaml(cls, representer, node): + return representer.represent_str(node.value) + + +class Main(Base): + project_name: letter_dash_underscore_pydantic + namespace: letter_dash_underscore_pydantic = "dev" + provider: ProviderEnum = ProviderEnum.local + # In nebari_version only use major.minor.patch version - drop any pre/post/dev suffixes + nebari_version: str = __version__ + + prevent_deploy: bool = ( + False # Optional, but will be given default value if not present + ) + + # If the nebari_version in the schema is old + # we must tell the user to first run nebari upgrade + @pydantic.validator("nebari_version", pre=True, always=True) + def check_default(cls, v): + """ + Always called even if nebari_version is not supplied at all (so defaults to ''). That way we can give a more helpful error message. + """ + if not cls.is_version_accepted(v): + if v == "": + v = "not supplied" + raise ValueError( + f"nebari_version in the config file must be equivalent to {__version__} to be processed by this version of nebari (your config file version is {v})." + " Install a different version of nebari or run nebari upgrade to ensure your config file is compatible." + ) + return v + + @classmethod + def is_version_accepted(cls, v): + return v != "" and rounded_ver_parse(v) == rounded_ver_parse(__version__) + + @property + def escaped_project_name(self): + """Escaped project-name know to be compatible with all clouds""" + project_name = self.project_name + + if self.provider == ProviderEnum.azure and "-" in project_name: + project_name = escape_string(project_name, escape_char="a") + + if self.provider == ProviderEnum.aws and project_name.startswith("aws"): + project_name = "a" + project_name + + if len(project_name) > 16: + project_name = project_name[:16] + + return project_name + + +def is_version_accepted(v): + """ + Given a version string, return boolean indicating whether + nebari_version in the nebari-config.yaml would be acceptable + for deployment with the current Nebari package. 
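A couple of hedged examples of the new top-level model in use, assuming the package is installed; `nebari_version` defaults to the running package version, so only `project_name` is required:

```python
from nebari import schema

# provider == aws and the name starts with "aws" -> an "a" is prepended
cfg = schema.Main(project_name="awsdata", provider=schema.ProviderEnum.aws)
print(cfg.escaped_project_name)  # "aawsdata"

# provider == azure and the name contains "-" -> the dash is hex-escaped via escape_string()
cfg = schema.Main(project_name="my-lib", provider=schema.ProviderEnum.azure)
print(cfg.escaped_project_name)

# unknown fields are rejected because Base sets extra = "forbid"
# schema.Main(project_name="demo", not_a_field=1)  # would raise pydantic.ValidationError
```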
+ """ + return Main.is_version_accepted(v) diff --git a/tests/tests_deployment/test_jupyterhub_ssh.py b/tests/tests_deployment/test_jupyterhub_ssh.py index 8ecfa4f40..0a310f7fa 100644 --- a/tests/tests_deployment/test_jupyterhub_ssh.py +++ b/tests/tests_deployment/test_jupyterhub_ssh.py @@ -1,15 +1,13 @@ import re +import string import uuid import paramiko import pytest +from tests_deployment import constants +from tests_deployment.utils import get_jupyterhub_token, monkeypatch_ssl_context -from tests.tests_deployment import constants -from tests.tests_deployment.utils import ( - escape_string, - get_jupyterhub_token, - monkeypatch_ssl_context, -) +from _nebari.utils import escape_string monkeypatch_ssl_context() @@ -101,7 +99,10 @@ def test_exact_jupyterhub_ssh(paramiko_object): ("pwd", f"/home/{constants.KEYCLOAK_USERNAME}"), ("echo $HOME", f"/home/{constants.KEYCLOAK_USERNAME}"), ("conda activate default && echo $CONDA_PREFIX", "/opt/conda/envs/default"), - ("hostname", f"jupyter-{escape_string(constants.KEYCLOAK_USERNAME)}"), + ( + "hostname", + f"jupyter-{escape_string(constants.KEYCLOAK_USERNAME, safe=set(string.ascii_lowercase + string.digits), escape_char='-').lower()}", + ), ] for command, output in commands_exact: diff --git a/tests/tests_deployment/utils.py b/tests/tests_deployment/utils.py index 5be0f948d..327de5330 100644 --- a/tests/tests_deployment/utils.py +++ b/tests/tests_deployment/utils.py @@ -1,8 +1,6 @@ import re import ssl -import string -import escapism import requests from tests.tests_deployment import constants @@ -61,11 +59,3 @@ def _inner(*args, **kwargs): sslcontext = ssl.create_default_context() ssl.create_default_context = create_default_context(sslcontext) - - -def escape_string(s): - # https://github.com/jupyterhub/kubespawner/blob/main/kubespawner/spawner.py#L1681 - # Make sure username and servername match the restrictions for DNS labels - # Note: '-' is not in safe_chars, as it is being used as escape character - safe_chars = set(string.ascii_lowercase + string.digits) - return escapism.escape(s, safe=safe_chars, escape_char="-").lower() diff --git a/tests/tests_unit/conftest.py b/tests/tests_unit/conftest.py index ba588d1b1..72b5b18b6 100644 --- a/tests/tests_unit/conftest.py +++ b/tests/tests_unit/conftest.py @@ -1,18 +1,142 @@ +import typing from unittest.mock import Mock import pytest -from tests.tests_unit.utils import INIT_INPUTS, NEBARI_CONFIG_FN, PRESERVED_DIR +from _nebari.config import write_configuration +from _nebari.initialize import render_config +from _nebari.render import render_template +from _nebari.stages.bootstrap import CiEnum +from _nebari.stages.kubernetes_keycloak import AuthenticationEnum +from _nebari.stages.terraform_state import TerraformStateEnum +from nebari import schema +from nebari.plugins import nebari_plugin_manager -@pytest.fixture(params=INIT_INPUTS) -def setup_fixture(request, monkeypatch, tmp_path): - """This fixture helps simplify writing tests by: - - parametrizing the different cloud provider inputs in a single place - - creating a tmp directory (and file) for the `nebari-config.yaml` to be save to - - monkeypatching functions that call out to external APIs. 
- """ - render_config_inputs = request.param +@pytest.fixture(autouse=True) +def mock_all_cloud_methods(monkeypatch): + def _mock_kubernetes_versions( + k8s_versions: typing.List[str] = ["1.18", "1.19", "1.20"], + grab_latest_version=False, + ): + # template for all `kubernetes_versions` calls + # monkeypatched to avoid making outbound API calls in CI + m = Mock() + m.return_value = k8s_versions + if grab_latest_version: + m.return_value = k8s_versions[-1] + return m + + def _mock_return_value(return_value): + m = Mock() + m.return_value = return_value + return m + + def _mock_aws_availability_zones(region="us-west-2"): + m = Mock() + m.return_value = ["us-west-2a", "us-west-2b"] + return m + + MOCK_VALUES = { + # AWS + "_nebari.provider.cloud.amazon_web_services.kubernetes_versions": [ + "1.18", + "1.19", + "1.20", + ], + "_nebari.provider.cloud.amazon_web_services.check_credentials": None, + "_nebari.provider.cloud.amazon_web_services.regions": [ + "us-east-1", + "us-west-2", + ], + "_nebari.provider.cloud.amazon_web_services.zones": [ + "us-west-2a", + "us-west-2b", + ], + "_nebari.provider.cloud.amazon_web_services.instances": { + "m5.xlarge": "m5.xlarge", + "m5.2xlarge": "m5.2xlarge", + }, + # Azure + "_nebari.provider.cloud.azure_cloud.kubernetes_versions": [ + "1.18", + "1.19", + "1.20", + ], + "_nebari.provider.cloud.azure_cloud.check_credentials": None, + # Digital Ocean + "_nebari.provider.cloud.digital_ocean.kubernetes_versions": [ + "1.19.2-do.3", + "1.20.2-do.0", + "1.21.5-do.0", + ], + "_nebari.provider.cloud.digital_ocean.check_credentials": None, + "_nebari.provider.cloud.digital_ocean.regions": [ + {"name": "New York 3", "slug": "nyc3"}, + ], + "_nebari.provider.cloud.digital_ocean.instances": [ + {"name": "s-2vcpu-4gb", "slug": "s-2vcpu-4gb"}, + {"name": "g-2vcpu-8gb", "slug": "g-2vcpu-8gb"}, + {"name": "g-8vcpu-32gb", "slug": "g-8vcpu-32gb"}, + {"name": "g-4vcpu-16gb", "slug": "g-4vcpu-16gb"}, + ], + # Google Cloud + "_nebari.provider.cloud.google_cloud.kubernetes_versions": [ + "1.18", + "1.19", + "1.20", + ], + "_nebari.provider.cloud.google_cloud.check_credentials": None, + } + + for attribute_path, return_value in MOCK_VALUES.items(): + monkeypatch.setattr(attribute_path, _mock_return_value(return_value)) + + monkeypatch.setenv("PROJECT_ID", "pytest-project") + + +@pytest.fixture( + params=[ + # project, namespace, domain, cloud_provider, ci_provider, auth_provider + ( + "pytestdo", + "dev", + "do.nebari.dev", + schema.ProviderEnum.do, + CiEnum.github_actions, + AuthenticationEnum.password, + ), + ( + "pytestaws", + "dev", + "aws.nebari.dev", + schema.ProviderEnum.aws, + CiEnum.github_actions, + AuthenticationEnum.password, + ), + ( + "pytestgcp", + "dev", + "gcp.nebari.dev", + schema.ProviderEnum.gcp, + CiEnum.github_actions, + AuthenticationEnum.password, + ), + ( + "pytestazure", + "dev", + "azure.nebari.dev", + schema.ProviderEnum.azure, + CiEnum.github_actions, + AuthenticationEnum.password, + ), + ] +) +def nebari_config_options(request) -> schema.Main: + """This fixtures creates a set of nebari configurations for tests""" + DEFAULT_GH_REPO = "github.com/test/test" + DEFAULT_TERRAFORM_STATE = TerraformStateEnum.remote + ( project, namespace, @@ -20,48 +144,40 @@ def setup_fixture(request, monkeypatch, tmp_path): cloud_provider, ci_provider, auth_provider, - ) = render_config_inputs + ) = request.param - def _mock_kubernetes_versions(grab_latest_version=False): - # template for all `kubernetes_versions` calls - # monkeypatched to avoid making outbound API calls 
-        k8s_versions = ["1.18", "1.19", "1.20"]
-        m = Mock()
-        m.return_value = k8s_versions
-        if grab_latest_version:
-            m.return_value = k8s_versions[-1]
-        return m
+    return dict(
+        project_name=project,
+        namespace=namespace,
+        nebari_domain=domain,
+        cloud_provider=cloud_provider,
+        ci_provider=ci_provider,
+        auth_provider=auth_provider,
+        repository=DEFAULT_GH_REPO,
+        repository_auto_provision=False,
+        auth_auto_provision=False,
+        terraform_state=DEFAULT_TERRAFORM_STATE,
+        disable_prompt=True,
+    )
+
+
+@pytest.fixture
+def nebari_config(nebari_config_options):
+    return nebari_plugin_manager.config_schema.parse_obj(
+        render_config(**nebari_config_options)
+    )
+
+
+@pytest.fixture
+def nebari_stages():
+    return nebari_plugin_manager.ordered_stages
+
+
+@pytest.fixture
+def nebari_render(nebari_config, nebari_stages, tmp_path):
+    NEBARI_CONFIG_FN = "nebari-config.yaml"
 
-    if cloud_provider == "aws":
-        monkeypatch.setattr(
-            "_nebari.utils.amazon_web_services.kubernetes_versions",
-            _mock_kubernetes_versions(),
-        )
-    elif cloud_provider == "azure":
-        monkeypatch.setattr(
-            "_nebari.utils.azure_cloud.kubernetes_versions",
-            _mock_kubernetes_versions(),
-        )
-    elif cloud_provider == "do":
-        monkeypatch.setattr(
-            "_nebari.utils.digital_ocean.kubernetes_versions",
-            _mock_kubernetes_versions(),
-        )
-    elif cloud_provider == "gcp":
-        monkeypatch.setattr(
-            "_nebari.utils.google_cloud.kubernetes_versions",
-            _mock_kubernetes_versions(),
-        )
-
-    output_directory = tmp_path / f"{cloud_provider}_output_dir"
-    output_directory.mkdir()
-    nebari_config_loc = output_directory / NEBARI_CONFIG_FN
-
-    # data that should NOT be deleted when `nebari render` is called
-    # see test_render.py::test_remove_existing_renders
-    preserved_directory = output_directory / PRESERVED_DIR
-    preserved_directory.mkdir()
-    preserved_filename = preserved_directory / "file.txt"
-    preserved_filename.write_text("This is a test...")
-
-    yield (nebari_config_loc, render_config_inputs)
+    config_filename = tmp_path / NEBARI_CONFIG_FN
+    write_configuration(config_filename, nebari_config)
+    render_template(tmp_path, nebari_config, nebari_stages)
+    return tmp_path, config_filename
diff --git a/tests/tests_unit/test_cli.py b/tests/tests_unit/test_cli.py
index a45072d70..d8a4e423b 100644
--- a/tests/tests_unit/test_cli.py
+++ b/tests/tests_unit/test_cli.py
@@ -2,8 +2,8 @@
 
 import pytest
 
-from _nebari.schema import InitInputs
-from _nebari.utils import load_yaml
+from _nebari.subcommands.init import InitInputs
+from nebari.plugins import nebari_plugin_manager
 
 PROJECT_NAME = "clitest"
 DOMAIN_NAME = "clitest.dev"
@@ -13,7 +13,7 @@
     "namespace, auth_provider, ci_provider, ssl_cert_email",
     (
         [None, None, None, None],
-        ["prod", "github", "github-actions", "it@acme.org"],
+        ["prod", "password", "github-actions", "it@acme.org"],
     ),
 )
 def test_nebari_init(tmp_path, namespace, auth_provider, ci_provider, ssl_cert_email):
@@ -48,34 +48,20 @@ def test_nebari_init(tmp_path, namespace, auth_provider, ci_provider, ssl_cert_e
 
     subprocess.run(command, cwd=tmp_path, check=True)
 
-    config = load_yaml(tmp_path / "nebari-config.yaml")
+    config = nebari_plugin_manager.read_config(tmp_path / "nebari-config.yaml")
 
-    assert config.get("namespace") == namespace
-    assert (
-        config.get("security", {}).get("authentication", {}).get("type").lower()
-        == auth_provider
-    )
-    ci_cd = config.get("ci_cd", None)
-    if ci_cd:
-        assert ci_cd.get("type", {}) == ci_provider
-    else:
-        assert ci_cd == ci_provider
-    acme_email = config.get("certificate", None)
-    if acme_email:
-        assert acme_email.get("acme_email") == ssl_cert_email
-    else:
-        assert acme_email == ssl_cert_email
-
-
-def test_python_invocation():
-    def run(command):
-        return subprocess.run(
-            command, check=True, capture_output=True, text=True
-        ).stdout.strip()
-
-    command = ["nebari", "--version"]
+    assert config.namespace == namespace
+    assert config.security.authentication.type.lower() == auth_provider
+    assert config.ci_cd.type == ci_provider
+    assert config.certificate.acme_email == ssl_cert_email
 
-    actual = run(["python", "-m", *command])
-    expected = run(command)
-    assert actual == expected
+
+@pytest.mark.parametrize(
+    "command",
+    (
+        ["nebari", "--version"],
+        ["nebari", "info"],
+    ),
+)
+def test_nebari_commands_no_args(command):
+    subprocess.run(command, check=True, capture_output=True, text=True).stdout.strip()
diff --git a/tests/tests_unit/test_init.py b/tests/tests_unit/test_init.py
index a64d511fc..4ad980a23 100644
--- a/tests/tests_unit/test_init.py
+++ b/tests/tests_unit/test_init.py
@@ -1,42 +1,45 @@
 import pytest
 
-from .utils import render_config_partial
+from _nebari.initialize import render_config
+from _nebari.stages.bootstrap import CiEnum
+from _nebari.stages.kubernetes_keycloak import AuthenticationEnum
+from nebari.schema import ProviderEnum
 
 
 @pytest.mark.parametrize(
-    "k8s_version, expected", [(None, True), ("1.19", True), (1000, ValueError)]
+    "k8s_version, cloud_provider, expected",
+    [
+        (None, ProviderEnum.aws, None),
+        ("1.19", ProviderEnum.aws, "1.19"),
+        # (1000, ProviderEnum.aws, ValueError),  # TODO: fix this
+    ],
 )
-def test_init(setup_fixture, k8s_version, expected):
-    (nebari_config_loc, render_config_inputs) = setup_fixture
-    (
-        project,
-        namespace,
-        domain,
-        cloud_provider,
-        ci_provider,
-        auth_provider,
-    ) = render_config_inputs
-
-    # pass "unsupported" kubernetes version to `render_config`
-    # resulting in a `ValueError`
+def test_render_config(mock_all_cloud_methods, k8s_version, cloud_provider, expected):
     if type(expected) == type and issubclass(expected, Exception):
         with pytest.raises(expected):
-            render_config_partial(
-                project_name=project,
-                namespace=namespace,
-                nebari_domain=domain,
+            config = render_config(
+                project_name="test",
+                namespace="dev",
+                nebari_domain="test.dev",
                 cloud_provider=cloud_provider,
-                ci_provider=ci_provider,
-                auth_provider=auth_provider,
+                ci_provider=CiEnum.none,
+                auth_provider=AuthenticationEnum.password,
                 kubernetes_version=k8s_version,
             )
+        assert config
     else:
-        render_config_partial(
-            project_name=project,
-            namespace=namespace,
-            nebari_domain=domain,
+        config = render_config(
+            project_name="test",
+            namespace="dev",
+            nebari_domain="test.dev",
             cloud_provider=cloud_provider,
-            ci_provider=ci_provider,
-            auth_provider=auth_provider,
+            ci_provider=CiEnum.none,
+            auth_provider=AuthenticationEnum.password,
             kubernetes_version=k8s_version,
         )
+
+        assert (
+            config.get("amazon_web_services", {}).get("kubernetes_version") == expected
+        )
+
+        assert config["project_name"] == "test"
diff --git a/tests/tests_unit/test_links.py b/tests/tests_unit/test_links.py
index 8df7ee1e6..a393391ce 100644
--- a/tests/tests_unit/test_links.py
+++ b/tests/tests_unit/test_links.py
@@ -1,7 +1,7 @@
 import pytest
 import requests
 
-from _nebari.utils import AWS_ENV_DOCS, AZURE_ENV_DOCS, DO_ENV_DOCS, GCP_ENV_DOCS
+from _nebari.constants import AWS_ENV_DOCS, AZURE_ENV_DOCS, DO_ENV_DOCS, GCP_ENV_DOCS
 
 LINKS_TO_TEST = [
     DO_ENV_DOCS,
diff --git a/tests/tests_unit/test_render.py b/tests/tests_unit/test_render.py
index 2ec7f407a..06b5e5a1f 100644
--- a/tests/tests_unit/test_render.py
+++ b/tests/tests_unit/test_render.py
@@ -1,119 +1,41 @@
 import os
-from pathlib import Path
 
-import pytest
-from ruamel.yaml import YAML
+from _nebari.stages.bootstrap import CiEnum
+from nebari import schema
+from nebari.plugins import nebari_plugin_manager
 
-from _nebari.render import render_template, set_env_vars_in_config
-
-from .utils import PRESERVED_DIR, render_config_partial
-
-
-@pytest.fixture
-def write_nebari_config_to_file(setup_fixture):
-    nebari_config_loc, render_config_inputs = setup_fixture
-    (
-        project,
-        namespace,
-        domain,
-        cloud_provider,
-        ci_provider,
-        auth_provider,
-    ) = render_config_inputs
-
-    config = render_config_partial(
-        project_name=project,
-        namespace=namespace,
-        nebari_domain=domain,
-        cloud_provider=cloud_provider,
-        ci_provider=ci_provider,
-        auth_provider=auth_provider,
-        kubernetes_version=None,
+def test_render_config(nebari_render):
+    output_directory, config_filename = nebari_render
+    config = nebari_plugin_manager.read_config(config_filename)
+    assert {"nebari-config.yaml", "stages", ".gitignore"} <= set(
+        os.listdir(output_directory)
     )
-
-    # write to nebari_config.yaml
-    yaml = YAML(typ="unsafe", pure=True)
-    yaml.dump(config, nebari_config_loc)
-
-    render_template(nebari_config_loc.parent, nebari_config_loc)
-
-    yield setup_fixture
-
-
-def test_get_secret_config_entries(monkeypatch):
-    sec1 = "secret1"
-    sec2 = "nestedsecret1"
-    config_orig = {
-        "key1": "value1",
-        "key2": "NEBARI_SECRET_secret_val",
-        "key3": {
-            "nested_key1": "nested_value1",
-            "nested_key2": "NEBARI_SECRET_nested_secret_val",
-        },
-    }
-    expected = {
-        "key1": "value1",
-        "key2": sec1,
-        "key3": {
-            "nested_key1": "nested_value1",
-            "nested_key2": sec2,
-        },
-    }
-
-    # should raise error if implied env var is not set
-    with pytest.raises(EnvironmentError):
-        config = config_orig.copy()
-        set_env_vars_in_config(config)
-
-    monkeypatch.setenv("secret_val", sec1, prepend=False)
-    monkeypatch.setenv("nested_secret_val", sec2, prepend=False)
-    config = config_orig.copy()
-    set_env_vars_in_config(config)
-    assert config == expected
-
-
-def test_render_template(write_nebari_config_to_file):
-    nebari_config_loc, render_config_inputs = write_nebari_config_to_file
-    (
-        project,
-        namespace,
-        domain,
-        cloud_provider,
-        ci_provider,
-        auth_provider,
-    ) = render_config_inputs
-
-    yaml = YAML()
-    nebari_config_json = yaml.load(nebari_config_loc.read_text())
-
-    assert nebari_config_json["project_name"] == project
-    assert nebari_config_json["namespace"] == namespace
-    assert nebari_config_json["domain"] == domain
-    assert nebari_config_json["provider"] == cloud_provider
-
-
-def test_exists_after_render(write_nebari_config_to_file):
-    items_to_check = [
-        ".gitignore",
-        "stages",
-        "nebari-config.yaml",
-        PRESERVED_DIR,
-    ]
-
-    nebari_config_loc, _ = write_nebari_config_to_file
-
-    yaml = YAML()
-    nebari_config_json = yaml.load(nebari_config_loc.read_text())
-
-    # list of files/dirs available after `nebari render` command
-    ls = os.listdir(Path(nebari_config_loc).parent.resolve())
-
-    cicd = nebari_config_json.get("ci_cd", {}).get("type", None)
-
-    if cicd == "github-actions":
-        items_to_check.append(".github")
-    elif cicd == "gitlab-ci":
-        items_to_check.append(".gitlab-ci.yml")
-
-    for i in items_to_check:
-        assert i in ls
+    assert {
+        "07-kubernetes-services",
+        "02-infrastructure",
+        "01-terraform-state",
+        "05-kubernetes-keycloak",
+        "08-nebari-tf-extensions",
+        "06-kubernetes-keycloak-configuration",
+        "04-kubernetes-ingress",
+        "03-kubernetes-initialize",
+    } == set(os.listdir(output_directory / "stages"))
+
+    if config.provider == schema.ProviderEnum.do:
+        assert (output_directory / "stages" / "01-terraform-state/do").is_dir()
+        assert (output_directory / "stages" / "02-infrastructure/do").is_dir()
+    elif config.provider == schema.ProviderEnum.aws:
+        assert (output_directory / "stages" / "01-terraform-state/aws").is_dir()
+        assert (output_directory / "stages" / "02-infrastructure/aws").is_dir()
+    elif config.provider == schema.ProviderEnum.gcp:
+        assert (output_directory / "stages" / "01-terraform-state/gcp").is_dir()
+        assert (output_directory / "stages" / "02-infrastructure/gcp").is_dir()
+    elif config.provider == schema.ProviderEnum.azure:
+        assert (output_directory / "stages" / "01-terraform-state/azure").is_dir()
+        assert (output_directory / "stages" / "02-infrastructure/azure").is_dir()
+
+    if config.ci_cd.type == CiEnum.github_actions:
+        assert (output_directory / ".github/workflows/").is_dir()
+    elif config.ci_cd.type == CiEnum.gitlab_ci:
+        assert (output_directory / ".gitlab-ci.yml").is_file()
diff --git a/tests/tests_unit/test_schema.py b/tests/tests_unit/test_schema.py
index d4d8cf878..2e733a302 100644
--- a/tests/tests_unit/test_schema.py
+++ b/tests/tests_unit/test_schema.py
@@ -1,27 +1,50 @@
-import _nebari.schema
-
-from .utils import render_config_partial
-
-
-def test_schema(setup_fixture):
-    (nebari_config_loc, render_config_inputs) = setup_fixture
-    (
-        project,
-        namespace,
-        domain,
-        cloud_provider,
-        ci_provider,
-        auth_provider,
-    ) = render_config_inputs
-
-    config = render_config_partial(
-        project_name=project,
-        namespace=namespace,
-        nebari_domain=domain,
-        cloud_provider=cloud_provider,
-        ci_provider=ci_provider,
-        auth_provider=auth_provider,
-        kubernetes_version=None,
-    )
-
-    _nebari.schema.verify(config)
+from nebari import schema
+from nebari.plugins import nebari_plugin_manager
+
+
+def test_minimal_schema():
+    config = nebari_plugin_manager.config_schema(project_name="test")
+    assert config.project_name == "test"
+    assert config.storage.conda_store == "200Gi"
+
+
+def test_minimal_schema_from_file(tmp_path):
+    filename = tmp_path / "nebari-config.yaml"
+    with filename.open("w") as f:
+        f.write("project_name: test\n")
+
+    config = nebari_plugin_manager.read_config(filename)
+    assert config.project_name == "test"
+    assert config.storage.conda_store == "200Gi"
+
+
+def test_minimal_schema_from_file_with_env(tmp_path, monkeypatch):
+    filename = tmp_path / "nebari-config.yaml"
+    with filename.open("w") as f:
+        f.write("project_name: test\n")
+
+    monkeypatch.setenv("NEBARI_SECRET__project_name", "env")
+    monkeypatch.setenv("NEBARI_SECRET__storage__conda_store", "1000Gi")
+
+    config = nebari_plugin_manager.read_config(filename)
+    assert config.project_name == "env"
+    assert config.storage.conda_store == "1000Gi"
+
+
+def test_minimal_schema_from_file_without_env(tmp_path, monkeypatch):
+    filename = tmp_path / "nebari-config.yaml"
+    with filename.open("w") as f:
+        f.write("project_name: test\n")
+
+    monkeypatch.setenv("NEBARI_SECRET__project_name", "env")
+    monkeypatch.setenv("NEBARI_SECRET__storage__conda_store", "1000Gi")
+
+    config = nebari_plugin_manager.read_config(filename, read_environment=False)
+    assert config.project_name == "test"
+    assert config.storage.conda_store == "200Gi"
+
+
+def test_render_schema(nebari_config):
+    assert isinstance(nebari_config, schema.Main)
+    assert nebari_config.project_name == f"pytest{nebari_config.provider.value}"
+    assert nebari_config.namespace == "dev"
diff --git a/tests/tests_unit/test_upgrade.py b/tests/tests_unit/test_upgrade.py
index f53d980f4..0946dcd99 100644
--- a/tests/tests_unit/test_upgrade.py
+++ b/tests/tests_unit/test_upgrade.py
@@ -2,8 +2,9 @@
 
 import pytest
 
-from _nebari.upgrade import do_upgrade, load_yaml, verify
+from _nebari.upgrade import do_upgrade
 from _nebari.version import __version__, rounded_ver_parse
+from nebari.plugins import nebari_plugin_manager
 
 
 @pytest.fixture
@@ -69,32 +70,24 @@ def test_upgrade_4_0(
         return
 
     # Check the resulting YAML
-    config = load_yaml(tmp_qhub_config)
+    config = nebari_plugin_manager.read_config(tmp_qhub_config)
 
-    verify(
-        config
-    )  # Would raise an error if invalid by current Nebari version's standards
-
-    assert len(config["security"]["keycloak"]["initial_root_password"]) == 16
-
-    assert "users" not in config["security"]
-    assert "groups" not in config["security"]
+    assert len(config.security.keycloak.initial_root_password) == 16
+    assert not hasattr(config.security, "users")
+    assert not hasattr(config.security, "groups")
 
     __rounded_version__ = ".".join([str(c) for c in rounded_ver_parse(__version__)])
 
     # Check image versions have been bumped up
     assert (
-        config["default_images"]["jupyterhub"]
+        config.default_images.jupyterhub
         == f"quansight/nebari-jupyterhub:v{__rounded_version__}"
     )
     assert (
-        config["profiles"]["jupyterlab"][0]["kubespawner_override"]["image"]
+        config.profiles.jupyterlab[0].kubespawner_override.image
         == f"quansight/nebari-jupyterlab:v{__rounded_version__}"
     )
-
-    assert (
-        config.get("security", {}).get("authentication", {}).get("type", "") != "custom"
-    )
+    assert config.security.authentication.type != "custom"
 
     # Keycloak import users json
     assert (