Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
314c19d
modified: src/connectedk8s/azext_connectedk8s/_constants.py
rohan-dassani Dec 6, 2022
d34a3cb
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Dec 6, 2022
6842e24
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Dec 6, 2022
9aafd26
modified: src/connectedk8s/HISTORY.rst
rohan-dassani Dec 6, 2022
69ce330
modified: src/connectedk8s/azext_connectedk8s/_constants.py
rohan-dassani Dec 26, 2022
8b7f6d3
modified: src/connectedk8s/HISTORY.rst
rohan-dassani Dec 26, 2022
60f1b98
modified: src/connectedk8s/HISTORY.rst
rohan-dassani Jan 3, 2023
bd81d60
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 3, 2023
257478a
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 3, 2023
f862fc8
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 4, 2023
2407073
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 4, 2023
1b90a22
modified: src/connectedk8s/azext_connectedk8s/_constants.py
rohan-dassani Jan 4, 2023
01b9cf2
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 5, 2023
6e581ba
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 5, 2023
67ceb35
modified: .github/CODEOWNERS
rohan-dassani Jan 31, 2023
516eb42
modified: src/connectedk8s/azext_connectedk8s/_precheckutils.py
rohan-dassani Jan 31, 2023
f39dc33
Revert " modified: .github/CODEOWNERS"
rohan-dassani Jan 31, 2023
df18eb6
Merge branch 'cli_heuristics_change' of https://github.com/rohan-dass…
rohan-dassani Jan 31, 2023
63e0c33
Merge https://github.com/Azure/azure-cli-extensions into cli_heuristi…
rohan-dassani Jan 31, 2023
0d4ce97
modified: src/connectedk8s/azext_connectedk8s/custom.py
rohan-dassani Jan 31, 2023
8288271
Merge branch 'cli_heuristics_change' of https://github.com/rohan-dass…
rohan-dassani Jan 31, 2023
4bd6385
modified: src/connectedk8s/azext_connectedk8s/_precheckutils.py
rohan-dassani Jan 31, 2023
1316c35
modified: src/connectedk8s/HISTORY.rst
rohan-dassani Jan 31, 2023
c67c88c
modified: src/connectedk8s/azext_connectedk8s/_precheckutils.py
rohan-dassani Jan 31, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/connectedk8s/HISTORY.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,13 @@

Release History
===============
1.3.10
++++++

* Added CLI heuristics change
* Added AKS IOT infra support
* Bug Fix in precheckutils

1.3.9
++++++

Expand Down
4 changes: 2 additions & 2 deletions src/connectedk8s/azext_connectedk8s/_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@

# pylint: disable=line-too-long

Distribution_Enum_Values = ["auto", "generic", "openshift", "rancher_rke", "kind", "k3s", "minikube", "gke", "eks", "aks", "aks_management", "aks_workload", "capz", "aks_engine", "tkg", "canonical", "karbon"]
Infrastructure_Enum_Values = ["auto", "generic", "azure", "aws", "gcp", "azure_stack_hci", "azure_stack_hub", "azure_stack_edge", "vsphere", "windows_server"]
Distribution_Enum_Values = ["generic", "openshift", "rancher_rke", "kind", "k3s", "minikube", "gke", "eks", "aks", "aks_management", "aks_workload", "capz", "aks_engine", "tkg", "canonical", "karbon", "aks_edge_k3s", "aks_edge_k8s"]
Infrastructure_Enum_Values = ["generic", "azure", "aws", "gcp", "azure_stack_hci", "azure_stack_hub", "azure_stack_edge", "vsphere", "windows_server", "Windows 11 Enterprise", "Windows 11 Enterprise N", "Windows 11 IoT Enterprise", "Windows 11 Pro", "Windows 10 Enterprise", "Windows 10 Enterprise N", "Windows 10 Enterprise LTSC 2021", "Windows 10 Enterprise N LTSC 2021", "Windows 10 IoT Enterprise", "Windows 10 IoT Enterprise LTSC 2021", "Windows 10 Pro", "Windows 10 Enterprise LTSC 2019", "Windows 10 Enterprise N LTSC 2019", "Windows 10 IoT Enterprise LTSC 2019", "Windows Server 2022", "Windows Server 2022 Datacenter", "Windows Server 2022 Standard", "Windows Server 2019", "Windows Server 2019 Datacenter", "Windows Server 2019 Standard"]
AHB_Enum_Values = ["True", "False", "NotApplicable"]
Feature_Values = ["cluster-connect", "azure-rbac", "custom-locations"]
CRD_FOR_FORCE_DELETE = ["arccertificates.clusterconfig.azure.com", "azureclusteridentityrequests.clusterconfig.azure.com", "azureextensionidentities.clusterconfig.azure.com", "connectedclusters.arc.azure.com", "customlocationsettings.clusterconfig.azure.com", "extensionconfigs.clusterconfig.azure.com", "gitconfigs.clusterconfig.azure.com"]
Expand Down
51 changes: 27 additions & 24 deletions src/connectedk8s/azext_connectedk8s/_precheckutils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from azext_connectedk8s._client_factory import _resource_client_factory, _resource_providers_client
import azext_connectedk8s._constants as consts
import azext_connectedk8s._utils as azext_utils
import azext_connectedk8s.custom as custom
from kubernetes import client as kube_client
from azure.cli.core import get_default_cli
from azure.cli.core.azclierror import CLIInternalError, ClientRequestError, ArgumentUsageError, ManualInterrupt, AzureResponseError, AzureInternalError, ValidationError
Expand Down Expand Up @@ -96,8 +97,8 @@ def executing_cluster_diagnostic_checks_job(corev1_api_instance, batchv1_api_ins
job_name = "cluster-diagnostic-checks-job"
# Setting the log output as Empty
cluster_diagnostic_checks_container_log = ""

cmd_helm_delete = [helm_client_location, "uninstall", "cluster-diagnostic-checks", "-n", "azure-arc-release"]
release_namespace = azext_utils.get_release_namespace(kube_config, kube_context, helm_client_location, "cluster-diagnostic-checks")
cmd_helm_delete = [helm_client_location, "delete", "cluster-diagnostic-checks", "-n", "azure-arc-release"]
if kube_config:
cmd_helm_delete.extend(["--kubeconfig", kube_config])
if kube_context:
Expand All @@ -107,28 +108,30 @@ def executing_cluster_diagnostic_checks_job(corev1_api_instance, batchv1_api_ins
try:
# Executing the cluster diagnostic checks job yaml
config.load_kube_config(kube_config, kube_context)
# Attempting deletion of cluster diagnostic checks resources to handle the scenario if any stale resources are present
response_kubectl_delete_helm = Popen(cmd_helm_delete, stdout=PIPE, stderr=PIPE)
output_kubectl_delete_helm, error_kubectl_delete_helm = response_kubectl_delete_helm.communicate()
# If any error occured while execution of delete command
if (response_kubectl_delete_helm != 0):
# Converting the string of multiple errors to list
error_msg_list = error_kubectl_delete_helm.decode("ascii").split("\n")
error_msg_list.pop(-1)
valid_exception_list = []
# Checking if any exception occured or not
exception_occured_counter = 0
for ind_errors in error_msg_list:
if('not found' in ind_errors or 'deleted' in ind_errors):
pass
else:
valid_exception_list.append(ind_errors)
exception_occured_counter = 1
# If any exception occured we will print the exception and return
if exception_occured_counter == 1:
logger.warning("Cleanup of previous diagnostic checks helm release failed and hence couldn't install the new helm release. Please cleanup older release using \"helm delete cluster-diagnostic-checks -n azuer-arc-release\" and try onboarding again")
telemetry.set_exception(exception=error_kubectl_delete_helm.decode("ascii"), fault_type=consts.Cluster_Diagnostic_Checks_Release_Cleanup_Failed, summary="Error while executing cluster diagnostic checks Job")
return
# checking existence of the release and if present we delete the stale release
if release_namespace is not None:
# Attempting deletion of cluster diagnostic checks resources to handle the scenario if any stale resources are present
response_kubectl_delete_helm = Popen(cmd_helm_delete, stdout=PIPE, stderr=PIPE)
output_kubectl_delete_helm, error_kubectl_delete_helm = response_kubectl_delete_helm.communicate()
# If any error occured while execution of delete command
if (response_kubectl_delete_helm.returncode != 0):
# Converting the string of multiple errors to list
error_msg_list = error_kubectl_delete_helm.decode("ascii").split("\n")
error_msg_list.pop(-1)
valid_exception_list = []
# Checking if any exception occured or not
exception_occured_counter = 0
for ind_errors in error_msg_list:
if('not found' in ind_errors or 'deleted' in ind_errors):
pass
else:
valid_exception_list.append(ind_errors)
exception_occured_counter = 1
# If any exception occured we will print the exception and return
if exception_occured_counter == 1:
logger.warning("Cleanup of previous diagnostic checks helm release failed and hence couldn't install the new helm release. Please cleanup older release using \"helm delete cluster-diagnostic-checks -n azure-arc-release\" and try onboarding again")
telemetry.set_exception(exception=error_kubectl_delete_helm.decode("ascii"), fault_type=consts.Cluster_Diagnostic_Checks_Release_Cleanup_Failed, summary="Error while executing cluster diagnostic checks Job")
return

chart_path = azext_utils.get_chart_path(consts.Cluster_Diagnostic_Checks_Job_Registry_Path, kube_config, kube_context, helm_client_location, consts.Pre_Onboarding_Helm_Charts_Folder_Name, consts.Pre_Onboarding_Helm_Charts_Release_Name)

Expand Down
25 changes: 25 additions & 0 deletions src/connectedk8s/azext_connectedk8s/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,6 +536,31 @@ def helm_install_release(chart_path, subscription_id, kubernetes_distro, kuberne
raise CLIInternalError("Unable to install helm release: " + error_helm_install.decode("ascii"))


def get_release_namespace(kube_config, kube_context, helm_client_location, release_name='azure-arc'):
    """Return the namespace of the helm release named *release_name*, or None.

    Runs ``helm list -a --all-namespaces --output json`` (honouring the given
    kubeconfig/context) and scans the result for a release with the given name.
    Raises CLIInternalError (after recording telemetry) when the helm command
    itself fails; returns None when helm's output is not valid JSON or the
    release is not present.
    """
    helm_list_cmd = [helm_client_location, "list", "-a", "--all-namespaces", "--output", "json"]
    if kube_config:
        helm_list_cmd += ["--kubeconfig", kube_config]
    if kube_context:
        helm_list_cmd += ["--kube-context", kube_context]
    helm_list_proc = Popen(helm_list_cmd, stdout=PIPE, stderr=PIPE)
    stdout_bytes, stderr_bytes = helm_list_proc.communicate()
    if helm_list_proc.returncode != 0:
        error_text = stderr_bytes.decode("ascii")
        # 'forbidden' indicates an RBAC/permission problem on the user's side.
        if 'forbidden' in error_text:
            telemetry.set_user_fault()
        telemetry.set_exception(exception=error_text, fault_type=consts.List_HelmRelease_Fault_Type,
                                summary='Unable to list helm release')
        raise CLIInternalError("Helm list release failed: " + error_text)
    try:
        releases = json.loads(stdout_bytes.decode("ascii"))
    except json.decoder.JSONDecodeError:
        # helm printed something that is not JSON; treat as "release not found".
        return None
    for release_entry in releases:
        if release_entry['name'] == release_name:
            return release_entry['namespace']
    return None


def flatten(dd, separator='.', prefix=''):
try:
if isinstance(dd, dict):
Expand Down
Loading