Skip to content

Commit

Permalink
Remove flaky tests (#2129)
Browse files Browse the repository at this point in the history
Fix #1930
  • Loading branch information
nfx authored Jul 10, 2024
1 parent 4a75a7b commit f52a018
Show file tree
Hide file tree
Showing 3 changed files with 4 additions and 84 deletions.
10 changes: 3 additions & 7 deletions tests/integration/assessment/test_ext_hms.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,20 @@
import dataclasses
from datetime import timedelta

from databricks.sdk.errors import (
InvalidParameterValue,
NotFound,
)
from databricks.sdk.retries import retried
from databricks.labs.lsql.backends import CommandExecutionBackend
from databricks.sdk.service.iam import PermissionLevel


@retried(on=[NotFound, InvalidParameterValue], timeout=timedelta(minutes=5))
def test_running_real_assessment_job_ext_hms(
ws,
installation_ctx,
env_or_skip,
make_cluster_policy,
make_cluster_policy_permissions,
):
cluster_id = env_or_skip('TEST_EXT_HMS_CLUSTER_ID')
ext_hms_ctx = installation_ctx.replace(
skip_dashboards=True,
sql_backend=CommandExecutionBackend(ws, cluster_id),
config_transform=lambda wc: dataclasses.replace(
wc,
override_clusters=None,
Expand Down
8 changes: 1 addition & 7 deletions tests/integration/hive_metastore/test_workflows.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,7 @@
from datetime import timedelta

import pytest
from databricks.sdk.errors import InvalidParameterValue, NotFound
from databricks.sdk.retries import retried
from databricks.sdk.errors import NotFound


@retried(on=[NotFound, InvalidParameterValue], timeout=timedelta(minutes=10))
@pytest.mark.parametrize(
"prepare_tables_for_migration,workflow",
[
Expand Down Expand Up @@ -63,7 +59,6 @@ def test_table_migration_job_refreshes_migration_status(
assert len(asserts) == 0, assert_message


@retried(on=[NotFound], timeout=timedelta(minutes=8))
@pytest.mark.parametrize('prepare_tables_for_migration', [('hiveserde')], indirect=True)
def test_hiveserde_table_in_place_migration_job(ws, installation_ctx, prepare_tables_for_migration):
tables, dst_schema = prepare_tables_for_migration
Expand All @@ -84,7 +79,6 @@ def test_hiveserde_table_in_place_migration_job(ws, installation_ctx, prepare_ta
assert False, f"{table.name} not found in {dst_schema.catalog_name}.{dst_schema.name}"


@retried(on=[NotFound], timeout=timedelta(minutes=8))
@pytest.mark.parametrize('prepare_tables_for_migration', [('hiveserde')], indirect=True)
def test_hiveserde_table_ctas_migration_job(ws, installation_ctx, prepare_tables_for_migration):
tables, dst_schema = prepare_tables_for_migration
Expand Down
70 changes: 0 additions & 70 deletions tests/integration/install/test_installation.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
import dataclasses
import json
import logging
from dataclasses import replace
from datetime import timedelta

import pytest # pylint: disable=wrong-import-order
from databricks.labs.ucx.__about__ import __version__

from databricks.labs.blueprint.installation import Installation
from databricks.labs.blueprint.installer import RawState
from databricks.labs.blueprint.parallel import ManyError
from databricks.labs.blueprint.tui import MockPrompts
from databricks.labs.blueprint.wheels import ProductInfo
Expand Down Expand Up @@ -356,74 +354,6 @@ def test_check_inventory_database_exists(ws, installation_ctx):
installation_ctx.workspace_installer.configure()


@retried(on=[NotFound], timeout=timedelta(minutes=5))
@pytest.mark.parametrize('prepare_tables_for_migration', [('regular')], indirect=True)
def test_table_migration_job(ws, installation_ctx, env_or_skip, prepare_tables_for_migration):
    """End-to-end check of the migrate-tables workflow.

    Installs UCX with prompted cluster settings (parallelism, auto-scale
    bounds, instance pool), runs the workflow, then verifies that every
    prepared table appears in the destination schema and that the
    table-migration job cluster was created with the prompted configuration.
    """
    tables, dst_schema = prepare_tables_for_migration
    # Clear override_clusters so the prompted cluster settings take effect.
    ctx = installation_ctx.replace(
        config_transform=lambda wc: replace(wc, override_clusters=None),
        extend_prompts={
            r"Parallelism for migrating.*": "1000",
            r"Min workers for auto-scale.*": "2",
            r"Max workers for auto-scale.*": "20",
            r"Instance pool id to be set.*": env_or_skip("TEST_INSTANCE_POOL_ID"),
            r".*Do you want to update the existing installation?.*": 'yes',
        },
    )
    ctx.workspace_installation.run()
    ctx.deployed_workflows.run_workflow("migrate-tables")
    # the workflow run must have completed successfully
    assert ctx.deployed_workflows.validate_step("migrate-tables")
    # every prepared table must now exist in the destination schema
    for table in tables.values():
        try:
            assert ws.tables.get(f"{dst_schema.catalog_name}.{dst_schema.name}.{table.name}").name
        except NotFound:
            assert False, f"{table.name} not found in {dst_schema.catalog_name}.{dst_schema.name}"
    # the table_migration job cluster must carry the prompted configuration
    migrate_job_id = ctx.installation.load(RawState).resources["jobs"]["migrate-tables"]
    for job_cluster in ws.jobs.get(migrate_job_id).settings.job_clusters:
        # clusters for other tasks (e.g. parse logs) are not asserted on
        if job_cluster.job_cluster_key == "table_migration":
            assert job_cluster.new_cluster.autoscale.min_workers == 2
            assert job_cluster.new_cluster.autoscale.max_workers == 20
            assert job_cluster.new_cluster.spark_conf["spark.sql.sources.parallelPartitionDiscovery.parallelism"] == "1000"


@retried(on=[NotFound], timeout=timedelta(minutes=8))
@pytest.mark.parametrize('prepare_tables_for_migration', [('regular')], indirect=True)
def test_table_migration_job_cluster_override(ws, installation_ctx, prepare_tables_for_migration, env_or_skip):
    """Run migrate-tables against a pre-existing cluster.

    After installing with default prompts and running the workflow, verifies
    that the tables were migrated and that every migration task (except the
    log-parsing one) was pinned to the user-isolation cluster from the
    environment.
    """
    tables, dst_schema = prepare_tables_for_migration
    ctx = installation_ctx.replace(
        extend_prompts={
            r".*Do you want to update the existing installation?.*": 'yes',
        },
    )
    ctx.workspace_installation.run()
    ctx.deployed_workflows.run_workflow("migrate-tables")
    # the workflow run must have completed successfully
    assert ctx.deployed_workflows.validate_step("migrate-tables")
    # every prepared table must now exist in the destination schema
    for table in tables.values():
        try:
            assert ws.tables.get(f"{dst_schema.catalog_name}.{dst_schema.name}.{table.name}").name
        except NotFound:
            assert False, f"{table.name} not found in {dst_schema.catalog_name}.{dst_schema.name}"
    # each migrate-tables task (except parse_logs) must run on the override cluster
    state = ctx.installation.load(RawState)
    migrate_job_id = state.resources["jobs"]["migrate-tables"]
    for task in ws.jobs.get(migrate_job_id).settings.tasks:
        if task.task_key == "parse_logs":
            continue
        assert task.existing_cluster_id == env_or_skip("TEST_USER_ISOLATION_CLUSTER_ID")


def test_compare_remote_local_install_versions(ws, installation_ctx):
installation_ctx.workspace_installation.run()
with pytest.raises(
Expand Down

0 comments on commit f52a018

Please sign in to comment.