diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
index 976a1079fae8..88470301c073 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java
@@ -71,4 +71,10 @@ protected List<String> listCheckpointFiles(String transactionLogDirectory)
                 .map(path -> format("s3://%s/%s", bucketName, path))
                 .collect(toImmutableList());
     }
+
+    @Override
+    protected String bucketUrl()
+    {
+        return format("s3://%s/", bucketName);
+    }
 }
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
index d332b2e8aebf..5cd5d249e05b 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeConnectorSmokeTest.java
@@ -23,6 +23,7 @@
 import io.trino.execution.QueryManager;
 import io.trino.operator.OperatorStats;
 import io.trino.plugin.deltalake.util.DockerizedDataLake;
+import io.trino.plugin.deltalake.util.TestingHadoop;
 import io.trino.plugin.hive.TestingHivePlugin;
 import io.trino.spi.QueryId;
 import io.trino.sql.planner.OptimizerConfig.JoinDistributionType;
@@ -184,6 +185,47 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
         }
     }
 
+    @Test
+    public void testDropSchemaExternalFiles()
+    {
+        String schemaName = "externalFileSchema";
+        String schemaDir = bucketUrl() + "drop-schema-with-external-files/";
+        String subDir = schemaDir + "subdir/";
+        String externalFile = subDir + "external-file";
+
+        TestingHadoop hadoopContainer = dockerizedDataLake.getTestingHadoop();
+
+        // Create file in a subdirectory of the schema directory before creating schema
+        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-mkdir", "-p", subDir);
+        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-touchz", externalFile);
+
+        query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
+        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
+                .as("external file exists after creating schema")
+                .isEqualTo(0);
+
+        query("DROP SCHEMA " + schemaName);
+        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
+                .as("external file exists after dropping schema")
+                .isEqualTo(0);
+
+        // Test behavior without external file
+        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-rm", "-r", subDir);
+
+        query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
+        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
+                .as("schema directory exists after creating schema")
+                .isEqualTo(0);
+
+        query("DROP SCHEMA " + schemaName);
+        // Check the schema directory itself: externalFile was already removed by -rm above, so testing it would pass vacuously
+        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", schemaDir).getExitCode())
+                .as("schema directory deleted after dropping schema without external file")
+                .isEqualTo(1);
+    }
+
+    protected abstract String bucketUrl();
+
     @Test
     public void testCharTypeIsNotSupported()
     {
diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
index 8b871c28ad85..19a9a944942d 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeAdlsConnectorSmokeTest.java
@@ -23,11 +23,9 @@
 import com.google.common.io.Resources;
 import com.google.common.reflect.ClassPath;
 import io.trino.plugin.deltalake.util.DockerizedDataLake;
-import io.trino.plugin.deltalake.util.TestingHadoop;
 import io.trino.testing.QueryRunner;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.Parameters;
-import org.testng.annotations.Test;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -78,46 +76,6 @@ public TestDeltaLakeAdlsConnectorSmokeTest(String container, String account, Str
         this.adlsDirectory = format("abfs://%s@%s.dfs.core.windows.net/%s/", container, account, bucketName);
     }
 
-    @Test
-    public void testDropSchemaExternalFiles()
-    {
-        // TODO move this test to base class, so it's exercised for S3 too
-
-        String schemaName = "externalFileSchema";
-        String schemaDir = fullAdlsUrl() + "drop-schema-with-external-files/";
-        String subDir = schemaDir + "subdir/";
-        String externalFile = subDir + "external-file";
-
-        TestingHadoop hadoopContainer = dockerizedDataLake.getTestingHadoop();
-
-        // Create file in a subdirectory of the schema directory before creating schema
-        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-mkdir", "-p", subDir);
-        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-touchz", externalFile);
-
-        query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
-                .as("external file exists after creating schema")
-                .isEqualTo(0);
-
-        query("DROP SCHEMA " + schemaName);
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
-                .as("external file exists after dropping schema")
-                .isEqualTo(0);
-
-        // Test behavior without external file
-        hadoopContainer.runCommandInContainer("hdfs", "dfs", "-rm", "-r", subDir);
-
-        query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
-                .as("schema directory exists after creating schema")
-                .isEqualTo(0);
-
-        query("DROP SCHEMA " + schemaName);
-        assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
-                .as("schema directory deleted after dropping schema without external file")
-                .isEqualTo(1);
-    }
-
     @Override
     DockerizedDataLake createDockerizedDataLake()
             throws Exception
@@ -186,7 +144,7 @@ void createTableFromResources(String table, String resourcePath, QueryRunner que
     @Override
     String getLocationForTable(String bucketName, String tableName)
     {
-        return fullAdlsUrl() + tableName;
+        return bucketUrl() + tableName;
     }
 
     @Override
@@ -221,7 +179,8 @@ private List<String> listAllFilesRecursive(String directory)
                 .collect(toImmutableList());
     }
 
-    private String fullAdlsUrl()
+    @Override
+    protected String bucketUrl()
     {
         return format("abfs://%s@%s.dfs.core.windows.net/%s/", container, account, bucketName);
     }