Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@
import static io.trino.testing.TestingAccessControlManager.TestingPrivilegeType.INSERT_TABLE;
import static io.trino.testing.TestingAccessControlManager.privilege;
import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_DELETE;
import static io.trino.testing.TestingConnectorBehavior.SUPPORTS_INSERT;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static io.trino.testing.assertions.Assert.assertEquals;
import static io.trino.testing.assertions.Assert.assertEventually;
Expand Down Expand Up @@ -188,17 +187,6 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
}
}

/**
 * Aborts the calling test (via TestNG's {@link SkipException}) unless the connector under
 * test supports inserts.
 * <p>
 * Before skipping, it sanity-checks the negative path: a freshly created table must reject
 * an {@code INSERT} with the expected "Inserts are not supported" error, so a silently
 * broken behavior flag cannot mask a real failure.
 */
protected void skipUnlessInsertsSupported()
{
    if (hasBehavior(SUPPORTS_INSERT)) {
        // Inserts are available; nothing to verify, let the test proceed.
        return;
    }
    // Unique name avoids collisions when tests run concurrently against the same catalog
    String table = "verify_insert_failure_" + randomTableSuffix();
    assertUpdate("CREATE TABLE " + table + " (int_t INT)");
    assertThatThrownBy(() -> query("INSERT INTO " + table + " VALUES 1"))
            .hasMessageMatching("Inserts are not supported on the .* filesystem");
    throw new SkipException("Test requires insert support");
}

@Test
public void testDropSchemaExternalFiles()
{
Expand Down Expand Up @@ -602,8 +590,6 @@ public void testCreateTableAsWithSchemaLocation()
@Test
public void testCreateTableWithSchemaLocation()
{
skipUnlessInsertsSupported();

String tableName = "table1_with_curr_schema_loc_" + randomTableSuffix();
String tableName2 = "table2_with_curr_schema_loc_" + randomTableSuffix();
String schemaName = "test_schema" + randomTableSuffix();
Expand Down Expand Up @@ -642,8 +628,6 @@ public void testRenameTable()
@Test
public void testRenameExternalTable()
{
skipUnlessInsertsSupported();

String oldTable = "test_external_table_rename_old_" + randomTableSuffix();

assertUpdate(format("CREATE TABLE %s (a bigint, b double) WITH (location = '%s')", oldTable, getLocationForTable(bucketName, oldTable)));
Expand Down Expand Up @@ -678,8 +662,6 @@ public void testRenameTableAcrossSchemas()
@Test
public void testRenameExternalTableAcrossSchemas()
{
skipUnlessInsertsSupported();

String oldTable = "test_rename_old_" + randomTableSuffix();
assertUpdate(format("CREATE TABLE %s (a bigint, b double) WITH (location = '%s')", oldTable, getLocationForTable(bucketName, oldTable)));
assertUpdate("INSERT INTO " + oldTable + " VALUES (42, 43)", 1);
Expand Down Expand Up @@ -838,8 +820,6 @@ public void testSelectNestedTimestamps()
public void testConvertJsonStatisticsToParquetOnRowType()
throws Exception
{
skipUnlessInsertsSupported();

assertQuery("SELECT count(*) FROM json_stats_on_row_type", "VALUES 2");
String transactionLogDirectory = "json_stats_on_row_type/_delta_log";
String newTransactionFile = getLocationForTable(bucketName, "json_stats_on_row_type") + "/_delta_log/00000000000000000004.json";
Expand Down Expand Up @@ -908,8 +888,6 @@ public void testUppercaseColumnNames()
@Test
public void testInsertIntoNonLowercaseColumnTable()
{
skipUnlessInsertsSupported();

assertQuery(
"SELECT * FROM insert_nonlowercase_columns",
"VALUES " +
Expand Down Expand Up @@ -946,8 +924,6 @@ public void testInsertIntoNonLowercaseColumnTable()
@Test
public void testInsertNestedNonLowercaseColumns()
{
skipUnlessInsertsSupported();

assertQuery(
"SELECT an_int, nested.lower_case_string, nested.upper_case_string, nested.mixed_case_string FROM insert_nested_nonlowercase_columns",
"VALUES " +
Expand Down Expand Up @@ -987,8 +963,6 @@ public void testInsertNestedNonLowercaseColumns()
@Test
public void testInsertIntoPartitionedTable()
{
skipUnlessInsertsSupported();

String tableName = "test_insert_partitioned_" + randomTableSuffix();
assertUpdate(
format("CREATE TABLE %s (a_number, a_string) " +
Expand All @@ -1012,8 +986,6 @@ public void testInsertIntoPartitionedTable()
@Test
public void testInsertIntoPartitionedNonLowercaseColumnTable()
{
skipUnlessInsertsSupported();

assertQuery(
"SELECT * FROM insert_nonlowercase_columns_partitioned",
"VALUES " +
Expand Down Expand Up @@ -1054,8 +1026,6 @@ public void testInsertIntoPartitionedNonLowercaseColumnTable()
@Test
public void testPartialInsert()
{
skipUnlessInsertsSupported();

String tableName = "test_partial_insert_" + randomTableSuffix();
assertUpdate(
format("CREATE TABLE %s (a_number, a_string) WITH (location = '%s') AS " +
Expand All @@ -1071,8 +1041,6 @@ public void testPartialInsert()
@Test
public void testPartialInsertIntoPartitionedTable()
{
skipUnlessInsertsSupported();

String tableName = "test_partial_insert_partitioned_" + randomTableSuffix();
assertUpdate(
format("CREATE TABLE %s (a_number, a_string) " +
Expand All @@ -1093,8 +1061,6 @@ public void testPartialInsertIntoPartitionedTable()
@Test
public void testInsertColumnOrdering()
{
skipUnlessInsertsSupported();

String tableName = "test_insert_column_ordering_" + randomTableSuffix();
assertUpdate(
format("CREATE TABLE %s (a INT, b INT, c INT) WITH (location = '%s', partitioned_by = ARRAY['a', 'b'])",
Expand Down Expand Up @@ -1131,8 +1097,6 @@ public void testDefaultPartitions()
@Test
public void testCheckpointing()
{
skipUnlessInsertsSupported();

String tableName = "test_insert_checkpointing_" + randomTableSuffix();
assertUpdate(
format("CREATE TABLE %s (a_number, a_string) " +
Expand Down Expand Up @@ -1170,8 +1134,6 @@ public void testCheckpointing()
@Test(dataProvider = "testCheckpointWriteStatsAsStructDataProvider")
public void testCheckpointWriteStatsAsStruct(String type, String inputValue, String nullsFraction, String statsValue)
{
skipUnlessInsertsSupported();

String tableName = "test_checkpoint_write_stats_as_struct_" + randomTableSuffix();

// Set 'checkpoint_interval' as 1 to write 'stats_parsed' field every INSERT
Expand Down Expand Up @@ -1217,8 +1179,6 @@ public Object[][] testCheckpointWriteStatsAsStructDataProvider()
@Test
public void testCheckpointWriteStatsAsStructWithPartiallyUnsupportedColumnStats()
{
skipUnlessInsertsSupported();

String tableName = "test_checkpoint_write_stats_as_struct_partially_unsupported_" + randomTableSuffix();

// Column statistics on boolean column is unsupported
Expand Down Expand Up @@ -1272,8 +1232,6 @@ public Object[][] testDeltaLakeTableLocationChangedPartitionedDataProvider()
private void testDeltaLakeTableLocationChanged(boolean fewerEntries, boolean firstPartitioned, boolean secondPartitioned)
throws Exception
{
skipUnlessInsertsSupported();

// Create a table with a bunch of transaction log entries
String tableName = "test_table_location_changed_" + randomTableSuffix();
String initialLocation = getLocationForTable(bucketName, tableName);
Expand Down Expand Up @@ -1399,8 +1357,6 @@ public void testAnalyze()
@Test
public void testStatsSplitPruningBasedOnSepCreatedCheckpoint()
{
skipUnlessInsertsSupported();

String tableName = "test_sep_checkpoint_stats_pruning_" + randomTableSuffix();
String transactionLogDirectory = format("%s/_delta_log", tableName);
assertUpdate(
Expand Down Expand Up @@ -1440,8 +1396,6 @@ public void testStatsSplitPruningBasedOnSepCreatedCheckpoint()
@Test
public void testStatsSplitPruningBasedOnSepCreatedCheckpointOnTopOfCheckpointWithJustStructStats()
{
skipUnlessInsertsSupported();

String tableName = "test_sep_checkpoint_stats_pruning_struct_stats_" + randomTableSuffix();
createTableFromResources(tableName, "databricks/pruning/parquet_struct_statistics", getQueryRunner());
String transactionLogDirectory = format("%s/_delta_log", tableName);
Expand Down Expand Up @@ -1478,8 +1432,6 @@ public void testStatsSplitPruningBasedOnSepCreatedCheckpointOnTopOfCheckpointWit
public void testVacuum()
throws Exception
{
skipUnlessInsertsSupported();

String catalog = getSession().getCatalog().orElseThrow();
String tableName = "test_vacuum" + randomTableSuffix();
String tableLocation = getLocationForTable(bucketName, tableName);
Expand Down Expand Up @@ -1565,8 +1517,6 @@ public void testVacuumAccessControl()
@Test
public void testOptimize()
{
skipUnlessInsertsSupported();

String tableName = "test_optimize_" + randomTableSuffix();
String tableLocation = getLocationForTable(bucketName, tableName);
assertUpdate("CREATE TABLE " + tableName + " (key integer, value varchar) WITH (location = '" + tableLocation + "')");
Expand Down Expand Up @@ -1629,8 +1579,6 @@ public void testOptimizeParameterValidation()
@Test
public void testOptimizeWithPartitionedTable()
{
skipUnlessInsertsSupported();

String tableName = "test_optimize_partitioned_table_" + randomTableSuffix();
String tableLocation = getLocationForTable(bucketName, tableName);
assertUpdate("CREATE TABLE " + tableName + " (key integer, value varchar) WITH (location = '" + tableLocation + "', partitioned_by = ARRAY['value'])");
Expand Down Expand Up @@ -1666,8 +1614,6 @@ public void testOptimizeWithPartitionedTable()
@Test
public void testOptimizeWithEnforcedRepartitioning()
{
skipUnlessInsertsSupported();

Session currentSession = testSessionBuilder()
.setCatalog(getQueryRunner().getDefaultSession().getCatalog())
.setSchema(getQueryRunner().getDefaultSession().getSchema())
Expand Down Expand Up @@ -1773,8 +1719,6 @@ public void testDelete()
@Test
public void testOptimizeUsingForcedPartitioning()
{
skipUnlessInsertsSupported();

String tableName = "test_optimize_partitioned_table_" + randomTableSuffix();
String tableLocation = getLocationForTable(bucketName, tableName);
assertUpdate("CREATE TABLE " + tableName + " (key varchar, value1 integer, value2 varchar, value3 integer) WITH (location = '" + tableLocation + "', partitioned_by = ARRAY['key', 'value2', 'value3'])");
Expand Down