diff --git a/src/databricks/labs/ucx/hive_metastore/grants.py b/src/databricks/labs/ucx/hive_metastore/grants.py
index 6800b00537..d648afe46f 100644
--- a/src/databricks/labs/ucx/hive_metastore/grants.py
+++ b/src/databricks/labs/ucx/hive_metastore/grants.py
@@ -16,10 +16,8 @@
     AzureServicePrincipalInfo,
 )
 from databricks.labs.ucx.azure.access import (
-    AzureResourcePermissions,
     StoragePermissionMapping,
 )
-from databricks.labs.ucx.azure.resources import AzureAPIClient, AzureResources
 from databricks.labs.ucx.config import WorkspaceConfig
 from databricks.labs.ucx.framework.crawlers import CrawlerBase
 from databricks.labs.ucx.framework.utils import escape_sql_identifier
@@ -341,27 +339,19 @@ def __init__(
         ws: WorkspaceClient,
         backend: SqlBackend,
         spn_crawler: AzureServicePrincipalCrawler,
-        resource_permissions: AzureResourcePermissions,
+        installation: Installation,
     ):
         self._backend = backend
         self._ws = ws
         self._spn_crawler = spn_crawler
-        self._resource_permissions = resource_permissions
+        self._installation = installation
 
     @classmethod
     def for_cli(cls, ws: WorkspaceClient, installation: Installation):
         config = installation.load(WorkspaceConfig)
         sql_backend = StatementExecutionBackend(ws, config.warehouse_id)
-        locations = ExternalLocations(ws, sql_backend, config.inventory_database)
-        azure_client = AzureAPIClient(
-            ws.config.arm_environment.resource_manager_endpoint,
-            ws.config.arm_environment.service_management_endpoint,
-        )
-        graph_client = AzureAPIClient("https://graph.microsoft.com", "https://graph.microsoft.com")
-        azurerm = AzureResources(azure_client, graph_client)
-        resource_permissions = AzureResourcePermissions(installation, ws, azurerm, locations)
         spn_crawler = AzureServicePrincipalCrawler(ws, sql_backend, config.inventory_database)
-        return cls(ws, sql_backend, spn_crawler, resource_permissions)
+        return cls(ws, sql_backend, spn_crawler, installation)
 
     def get_eligible_locations_principals(self) -> dict[str, dict]:
         cluster_locations = {}
@@ -381,7 +371,9 @@ def get_eligible_locations_principals(self) -> dict[str, dict]:
             logger.error(msg)
             raise ResourceDoesNotExist(msg) from None
 
-        permission_mappings = self._resource_permissions.load()
+        permission_mappings = self._installation.load(
+            list[StoragePermissionMapping], filename="azure_storage_account_info.csv"
+        )
         if len(permission_mappings) == 0:
             # if permission mapping is empty, raise an error to run principal_prefix cmd
             msg = (
diff --git a/src/databricks/labs/ucx/runtime.py b/src/databricks/labs/ucx/runtime.py
index 928c56fa41..7c1bd2ba2f 100644
--- a/src/databricks/labs/ucx/runtime.py
+++ b/src/databricks/labs/ucx/runtime.py
@@ -11,8 +11,6 @@
 from databricks.labs.ucx.assessment.init_scripts import GlobalInitScriptCrawler
 from databricks.labs.ucx.assessment.jobs import JobsCrawler, SubmitRunsCrawler
 from databricks.labs.ucx.assessment.pipelines import PipelinesCrawler
-from databricks.labs.ucx.azure.access import AzureResourcePermissions
-from databricks.labs.ucx.azure.resources import AzureAPIClient, AzureResources
 from databricks.labs.ucx.config import WorkspaceConfig
 from databricks.labs.ucx.framework.tasks import task, trigger
 from databricks.labs.ucx.hive_metastore import ExternalLocations, Mounts, TablesCrawler
@@ -442,18 +440,8 @@ def migrate_external_tables_sync(
     mount_crawler = Mounts(sql_backend, ws, cfg.inventory_database)
     cluster_locations = {}
     if ws.config.is_azure:
-        locations = ExternalLocations(ws, sql_backend, cfg.inventory_database)
-        azure_client = AzureAPIClient(
-            ws.config.arm_environment.resource_manager_endpoint,
-            ws.config.arm_environment.service_management_endpoint,
-        )
-        graph_client = AzureAPIClient("https://graph.microsoft.com", "https://graph.microsoft.com")
-        azurerm = AzureResources(azure_client, graph_client)
-        resource_permissions = AzureResourcePermissions(install, ws, azurerm, locations)
         spn_crawler = AzureServicePrincipalCrawler(ws, sql_backend, cfg.inventory_database)
-        cluster_locations = AzureACL(
-            ws, sql_backend, spn_crawler, resource_permissions
-        ).get_eligible_locations_principals()
+        cluster_locations = AzureACL(ws, sql_backend, spn_crawler, install).get_eligible_locations_principals()
     interactive_grants = PrincipalACL(ws, sql_backend, install, table_crawler, mount_crawler, cluster_locations)
     TablesMigrator(
         table_crawler,
@@ -485,18 +473,8 @@ def migrate_dbfs_root_delta_tables(
     mount_crawler = Mounts(sql_backend, ws, cfg.inventory_database)
     cluster_locations = {}
     if ws.config.is_azure:
-        locations = ExternalLocations(ws, sql_backend, cfg.inventory_database)
-        azure_client = AzureAPIClient(
-            ws.config.arm_environment.resource_manager_endpoint,
-            ws.config.arm_environment.service_management_endpoint,
-        )
-        graph_client = AzureAPIClient("https://graph.microsoft.com", "https://graph.microsoft.com")
-        azurerm = AzureResources(azure_client, graph_client)
-        resource_permissions = AzureResourcePermissions(install, ws, azurerm, locations)
         spn_crawler = AzureServicePrincipalCrawler(ws, sql_backend, cfg.inventory_database)
-        cluster_locations = AzureACL(
-            ws, sql_backend, spn_crawler, resource_permissions
-        ).get_eligible_locations_principals()
+        cluster_locations = AzureACL(ws, sql_backend, spn_crawler, install).get_eligible_locations_principals()
     interactive_grants = PrincipalACL(ws, sql_backend, install, table_crawler, mount_crawler, cluster_locations)
     TablesMigrator(
         table_crawler,
diff --git a/tests/unit/hive_metastore/test_principal_grants.py b/tests/unit/hive_metastore/test_principal_grants.py
index e485b34897..31228f6092 100644
--- a/tests/unit/hive_metastore/test_principal_grants.py
+++ b/tests/unit/hive_metastore/test_principal_grants.py
@@ -13,12 +13,10 @@
     AzureServicePrincipalInfo,
     ServicePrincipalClusterMapping,
 )
-from databricks.labs.ucx.azure.access import AzureResourcePermissions
-from databricks.labs.ucx.azure.resources import AzureAPIClient, AzureResources
 from databricks.labs.ucx.config import WorkspaceConfig
 from databricks.labs.ucx.hive_metastore import Mounts, TablesCrawler
 from databricks.labs.ucx.hive_metastore.grants import AzureACL, Grant, PrincipalACL
-from databricks.labs.ucx.hive_metastore.locations import ExternalLocations, Mount
+from databricks.labs.ucx.hive_metastore.locations import Mount
 from databricks.labs.ucx.hive_metastore.tables import Table
 
 
@@ -63,17 +61,9 @@ def ws():
 def azure_acl(w, install, cluster_spn: list):
     config = install.load(WorkspaceConfig)
     sql_backend = StatementExecutionBackend(w, config.warehouse_id)
-    locations = create_autospec(ExternalLocations)
-    azure_client = AzureAPIClient(
-        w.config.arm_environment.resource_manager_endpoint,
-        w.config.arm_environment.service_management_endpoint,
-    )
-    graph_client = AzureAPIClient("https://graph.microsoft.com", "https://graph.microsoft.com")
-    azurerm = AzureResources(azure_client, graph_client)
-    resource_permissions = AzureResourcePermissions(install, w, azurerm, locations)
    spn_crawler = create_autospec(AzureServicePrincipalCrawler)
     spn_crawler.get_cluster_to_storage_mapping.return_value = cluster_spn
-    return AzureACL(w, sql_backend, spn_crawler, resource_permissions)
+    return AzureACL(w, sql_backend, spn_crawler, install)
 
 
 def principal_acl(w, install, cluster_spn: list):
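
Usage sketch (not part of the change set above): how a caller would wire the refactored AzureACL, assuming an existing UCX installation in the workspace and that Installation.assume_global from databricks-labs-blueprint is used to locate it. Names outside the diff are illustrative only.

# Hedged sketch: construct AzureACL via the new Installation-based path.
from databricks.labs.blueprint.installation import Installation
from databricks.sdk import WorkspaceClient

from databricks.labs.ucx.hive_metastore.grants import AzureACL

ws = WorkspaceClient()
# Assumed: the global UCX installation folder already exists in the workspace.
installation = Installation.assume_global(ws, "ucx")

# for_cli() now only builds the SQL backend and the service-principal crawler
# and passes the Installation straight through; the caller no longer constructs
# AzureResourcePermissions, AzureAPIClient or AzureResources.
azure_acl = AzureACL.for_cli(ws, installation)

# Storage permission mappings are loaded lazily inside this call from
# azure_storage_account_info.csv in the installation folder.
cluster_locations = azure_acl.get_eligible_locations_principals()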