Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -71,4 +71,10 @@ protected List<String> listCheckpointFiles(String transactionLogDirectory)
.map(path -> format("s3://%s/%s", bucketName, path))
.collect(toImmutableList());
}

@Override
protected String bucketUrl()
{
    // Root URL of the backing S3 bucket, with a trailing slash so callers
    // can append relative paths directly.
    return "s3://" + bucketName + "/";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import io.trino.execution.QueryManager;
import io.trino.operator.OperatorStats;
import io.trino.plugin.deltalake.util.DockerizedDataLake;
import io.trino.plugin.deltalake.util.TestingHadoop;
import io.trino.plugin.hive.TestingHivePlugin;
import io.trino.spi.QueryId;
import io.trino.sql.planner.OptimizerConfig.JoinDistributionType;
Expand Down Expand Up @@ -184,6 +185,46 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
}
}

@Test
public void testDropSchemaExternalFiles()
{
    // Verifies that DROP SCHEMA does not remove files it does not own:
    // a file placed under the schema location before the schema is created
    // must survive the drop, while an empty schema directory is removed
    // together with the schema.
    String schemaName = "externalFileSchema";
    String schemaDir = bucketUrl() + "drop-schema-with-external-files/";
    String subDir = schemaDir + "subdir/";
    String externalFile = subDir + "external-file";

    TestingHadoop hadoopContainer = dockerizedDataLake.getTestingHadoop();

    // Create file in a subdirectory of the schema directory before creating schema
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-mkdir", "-p", subDir);
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-touchz", externalFile);

    query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
    // `hdfs dfs -test -e` exits 0 when the path exists
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
            .as("external file exists after creating schema")
            .isEqualTo(0);

    query("DROP SCHEMA " + schemaName);
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
            .as("external file exists after dropping schema")
            .isEqualTo(0);

    // Test behavior without external file
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-rm", "-r", subDir);

    query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
            .as("schema directory exists after creating schema")
            .isEqualTo(0);

    query("DROP SCHEMA " + schemaName);
    // Bug fix: this previously probed externalFile, which was already removed
    // above (-rm -r subDir), so the assertion passed vacuously whether or not
    // the schema directory was deleted. Probe the schema directory itself.
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", schemaDir).getExitCode())
            .as("schema directory deleted after dropping schema without external file")
            .isEqualTo(1);
}

/**
 * Returns the root URL of the storage location backing this smoke test,
 * including a trailing slash (e.g. {@code s3://<bucket>/} or an
 * {@code abfs://...} URL), so relative paths can be appended directly.
 */
protected abstract String bucketUrl();

@Test
public void testCharTypeIsNotSupported()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,9 @@
import com.google.common.io.Resources;
import com.google.common.reflect.ClassPath;
import io.trino.plugin.deltalake.util.DockerizedDataLake;
import io.trino.plugin.deltalake.util.TestingHadoop;
import io.trino.testing.QueryRunner;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;

import java.io.IOException;
import java.io.UncheckedIOException;
Expand Down Expand Up @@ -78,46 +76,6 @@ public TestDeltaLakeAdlsConnectorSmokeTest(String container, String account, Str
this.adlsDirectory = format("abfs://%s@%s.dfs.core.windows.net/%s/", container, account, bucketName);
}

@Test
public void testDropSchemaExternalFiles()
{
    // TODO move this test to base class, so it's exercised for S3 too
    // Verifies that DROP SCHEMA does not remove files it does not own,
    // while an empty schema directory is removed together with the schema.
    String schemaName = "externalFileSchema";
    String schemaDir = fullAdlsUrl() + "drop-schema-with-external-files/";
    String subDir = schemaDir + "subdir/";
    String externalFile = subDir + "external-file";

    TestingHadoop hadoopContainer = dockerizedDataLake.getTestingHadoop();

    // Create file in a subdirectory of the schema directory before creating schema
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-mkdir", "-p", subDir);
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-touchz", externalFile);

    query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
    // `hdfs dfs -test -e` exits 0 when the path exists
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
            .as("external file exists after creating schema")
            .isEqualTo(0);

    query("DROP SCHEMA " + schemaName);
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", externalFile).getExitCode())
            .as("external file exists after dropping schema")
            .isEqualTo(0);

    // Test behavior without external file
    hadoopContainer.runCommandInContainer("hdfs", "dfs", "-rm", "-r", subDir);

    query(format("CREATE SCHEMA %s WITH (location = '%s')", schemaName, schemaDir));
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-d", schemaDir).getExitCode())
            .as("schema directory exists after creating schema")
            .isEqualTo(0);

    query("DROP SCHEMA " + schemaName);
    // Bug fix: this previously probed externalFile, which was already removed
    // above (-rm -r subDir), so the assertion passed vacuously. Probe the
    // schema directory itself.
    assertThat(hadoopContainer.executeInContainer("hdfs", "dfs", "-test", "-e", schemaDir).getExitCode())
            .as("schema directory deleted after dropping schema without external file")
            .isEqualTo(1);
}

@Override
DockerizedDataLake createDockerizedDataLake()
throws Exception
Expand Down Expand Up @@ -186,7 +144,7 @@ void createTableFromResources(String table, String resourcePath, QueryRunner que
@Override
String getLocationForTable(String bucketName, String tableName)
{
    // Tables live directly under the bucket root; bucketUrl() already
    // carries the trailing slash.
    // Fix: the pasted diff retained both the pre-change line
    // (`return fullAdlsUrl() + tableName;`) and the post-change line,
    // leaving two consecutive return statements; only the new one is kept.
    return bucketUrl() + tableName;
}

@Override
Expand Down Expand Up @@ -221,7 +179,8 @@ private List<String> listAllFilesRecursive(String directory)
.collect(toImmutableList());
}

private String fullAdlsUrl()
@Override
protected String bucketUrl()
{
return format("abfs://%s@%s.dfs.core.windows.net/%s/", container, account, bucketName);
}
Expand Down