Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -984,6 +984,54 @@ public void testIdentityColumns()
entry("delta.identity.allowExplicitInsert", false));
}

@Test
public void testWritesToTableWithIdentityColumnFails()
throws Exception
{
String tableName = "test_identity_columns_" + randomNameSuffix();
Path tableLocation = catalogDir.resolve(tableName);
copyDirectoryContents(new File(Resources.getResource("databricks122/identity_columns").toURI()).toPath(), tableLocation);
Comment thread
ebyhr marked this conversation as resolved.
Outdated
assertUpdate("CALL system.register_table(CURRENT_SCHEMA, '%s', '%s')".formatted(tableName, tableLocation.toUri()));

// Disallowing all statements just in case though some statements may be unrelated to identity columns
assertQueryFails(
"INSERT INTO " + tableName + " VALUES (4, 4)",
"Writing to tables with identity columns is not supported");
assertQueryFails(
"UPDATE " + tableName + " SET a = 3",
"Writing to tables with identity columns is not supported");
assertQueryFails(
"DELETE FROM " + tableName,
"Writing to tables with identity columns is not supported");
assertQueryFails(
"MERGE INTO " + tableName + " t USING " + tableName + " s ON (t.a = s.a) WHEN MATCHED THEN UPDATE SET a = 1",
"Writing to tables with identity columns is not supported");
}

@Test
public void testIdentityColumnTableFeature()
        throws Exception
{
    // Register a table (Databricks 13.3 static resource) whose protocol lists the
    // "identityColumns" writer feature, then verify every write statement is rejected.
    String tableName = "test_identity_columns_table_feature_" + randomNameSuffix();
    Path tableLocation = catalogDir.resolve(tableName);
    copyDirectoryContents(new File(Resources.getResource("databricks133/identity_columns_table_feature").toURI()).toPath(), tableLocation);
    assertUpdate("CALL system.register_table(CURRENT_SCHEMA, '%s', '%s')".formatted(tableName, tableLocation.toUri()));

    // Disallowing all statements just in case though some statements may be unrelated to identity columns
    String expectedFailure = "\\QUnsupported writer features: [identityColumns]";
    String[] writeStatements = {
            "INSERT INTO " + tableName + " VALUES (4, 4)",
            "UPDATE " + tableName + " SET a = 3",
            "DELETE FROM " + tableName,
            "MERGE INTO " + tableName + " t USING " + tableName + " s ON (t.a = s.a) WHEN MATCHED THEN UPDATE SET a = 1",
    };
    for (String statement : writeStatements) {
        assertQueryFails(statement, expectedFailure);
    }
}

/**
* @see deltalake.allow_column_defaults
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
Data generated using Databricks 13.3:

```sql
CREATE TABLE default.identity_columns_table_feature
(a INT, b INT)
USING DELTA
LOCATION ?
TBLPROPERTIES ('delta.feature.identityColumns' = 'supported');
```
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
{"commitInfo":{"timestamp":1718844490007,"userId":"7853186923043731","userName":"yuya.ebihara@starburstdata.com","operation":"CREATE TABLE","operationParameters":{"partitionBy":"[]","description":null,"isManaged":"false","properties":"{}","statsOnLoad":false},"notebook":{"notebookId":"1841155838656679"},"clusterId":"0830-081135-p4ddj2po","isolationLevel":"WriteSerializable","isBlindAppend":true,"operationMetrics":{},"tags":{"restoresDeletedRows":"false"},"engineInfo":"Databricks-Runtime/13.3.x-scala2.12","txnId":"b702de9c-e406-4c13-9e80-98cfb4503462"}}
{"metaData":{"id":"d26d5753-d69d-4529-986e-80e6d5d6ce95","format":{"provider":"parquet","options":{}},"schemaString":"{\"type\":\"struct\",\"fields\":[{\"name\":\"a\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"b\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}","partitionColumns":[],"configuration":{},"createdTime":1718844489815}}
{"protocol":{"minReaderVersion":1,"minWriterVersion":7,"writerFeatures":["identityColumns"]}}
Original file line number Diff line number Diff line change
Expand Up @@ -78,64 +78,6 @@ public void testIdentityColumn()
}
}

// NOTE(review): product test against a live Databricks cluster; deleted in this change,
// superseded by a unit test that registers a static table resource instead.
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testIdentityColumnTableFeature()
{
String tableName = "test_identity_column_feature_" + randomNameSuffix();

// Create a table via Spark with the identityColumns writer feature explicitly enabled.
onDelta().executeQuery("CREATE TABLE default." + tableName +
"(data INT, col_identity BIGINT GENERATED ALWAYS AS IDENTITY)" +
"USING DELTA " +
"LOCATION 's3://" + bucketName + "/" + "databricks-compatibility-test-" + tableName + "'" +
"TBLPROPERTIES ('delta.feature.identityColumns'='supported')");
try {
// Every Trino write statement must fail against a table with identity columns.
assertQueryFailure(() -> onTrino().executeQuery("INSERT INTO delta.default." + tableName + " VALUES (1, 1)"))
.hasMessageMatching(".* Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("UPDATE delta.default." + tableName + " SET data = 1"))
.hasMessageMatching(".* Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("DELETE FROM delta.default." + tableName))
.hasMessageMatching(".* Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("MERGE INTO delta.default." + tableName + " t USING delta.default." + tableName + " s " +
"ON (t.data = s.data) WHEN MATCHED THEN UPDATE SET data = 1"))
.hasMessageMatching(".* Writing to tables with identity columns is not supported");
}
finally {
dropDeltaTableWithRetry("default." + tableName);
}
}

// NOTE(review): product test against a live Databricks cluster; deleted in this change,
// superseded by a unit test that registers a static table resource instead.
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testWritesToTableWithIdentityColumnFails()
{
String tableName = "test_writes_into_table_with_identity_column_" + randomNameSuffix();
// Create the table via Spark, which can populate identity column values.
onDelta().executeQuery("CREATE TABLE default." + tableName +
"(data INT, col_identity BIGINT GENERATED ALWAYS AS IDENTITY)" +
"USING DELTA " +
"LOCATION 's3://" + bucketName + "/databricks-compatibility-test-" + tableName + "'");
try {
onDelta().executeQuery("INSERT INTO default." + tableName + " (data) VALUES (1), (2), (3)");

// Trino can read the Spark-written rows, including the generated identity values.
assertThat(onTrino().executeQuery("SELECT * FROM delta.default." + tableName))
.containsOnly(row(1, 1), row(2, 2), row(3, 3));

// Disallowing all statements just in case though some statements may be unrelated to identity columns
assertQueryFailure(() -> onTrino().executeQuery("INSERT INTO delta.default." + tableName + " VALUES (4, 4)"))
.hasMessageContaining("Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("UPDATE delta.default." + tableName + " SET data = 3"))
.hasMessageContaining("Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("DELETE FROM delta.default." + tableName))
.hasMessageContaining("Writing to tables with identity columns is not supported");
assertQueryFailure(() -> onTrino().executeQuery("MERGE INTO delta.default." + tableName + " t USING delta.default." + tableName + " s " +
"ON (t.data = s.data) WHEN MATCHED THEN UPDATE SET data = 1"))
.hasMessageContaining("Writing to tables with identity columns is not supported");
}
finally {
dropDeltaTableWithRetry("default." + tableName);
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_91, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingDataProvider")
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testRenameIdentityColumn(String mode)
Expand Down