Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -944,10 +944,6 @@ private static boolean isCreatedBy(Table table, String queryId)
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata newColumnMetadata)
{
if (newColumnMetadata.getComment() != null) {
throw new TrinoException(NOT_SUPPORTED, "This connector does not support adding columns with comments");
}

DeltaLakeTableHandle handle = (DeltaLakeTableHandle) tableHandle;
ConnectorTableMetadata tableMetadata = getTableMetadata(session, handle);

Expand All @@ -961,6 +957,11 @@ public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle
.map(column -> toColumnHandle(column, partitionColumns))
.collect(toImmutableList()));
columnsBuilder.add(toColumnHandle(newColumnMetadata, partitionColumns));
ImmutableMap.Builder<String, String> columnComments = ImmutableMap.builder();
columnComments.putAll(getColumnComments(handle.getMetadataEntry()));
if (newColumnMetadata.getComment() != null) {
columnComments.put(newColumnMetadata.getName(), newColumnMetadata.getComment());
}

Optional<Long> checkpointInterval = DeltaLakeTableProperties.getCheckpointInterval(tableMetadata.getProperties());

Expand All @@ -971,7 +972,7 @@ public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle
handle.getMetadataEntry().getId(),
columnsBuilder.build(),
partitionColumns,
getColumnComments(handle.getMetadataEntry()),
columnComments.buildOrThrow(),
buildDeltaMetadataConfiguration(checkpointInterval),
ADD_COLUMN_OPERATION,
session,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,6 @@ protected boolean hasBehavior(TestingConnectorBehavior connectorBehavior)
case SUPPORTS_TOPN_PUSHDOWN:
case SUPPORTS_AGGREGATION_PUSHDOWN:
case SUPPORTS_RENAME_TABLE:
case SUPPORTS_ADD_COLUMN_WITH_COMMENT:
case SUPPORTS_DROP_COLUMN:
case SUPPORTS_RENAME_COLUMN:
case SUPPORTS_COMMENT_ON_TABLE:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.deltalake;

import org.testng.annotations.Test;

import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnDelta;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnTrino;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.randomTableSuffix;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
import static org.testng.Assert.assertEquals;

public class TestDeltaLakeAlterTableCompatibility
        extends BaseTestDeltaLakeS3Storage
{
    /**
     * Verifies that a column comment supplied through Trino's
     * {@code ALTER TABLE ... ADD COLUMN ... COMMENT} is persisted in the Delta
     * transaction log and is visible from both the Trino and the Delta side.
     */
    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
    public void testAddColumnWithCommentOnTrino()
    {
        String tableName = "test_dl_add_column_with_comment_" + randomTableSuffix();
        String tableDirectory = "databricks-compatibility-test-" + tableName;

        String createTable = format("CREATE TABLE delta.default.%s (col INT) WITH (location = 's3://%s/%s')",
                tableName,
                bucketName,
                tableDirectory);
        onTrino().executeQuery(createTable);

        try {
            String addColumn = "ALTER TABLE delta.default." + tableName + " ADD COLUMN new_col INT COMMENT 'new column comment'";
            onTrino().executeQuery(addColumn);
            // The comment must round-trip: written via Trino, readable from both engines
            assertEquals(getColumnCommentOnTrino("default", tableName, "new_col"), "new column comment");
            assertEquals(getColumnCommentOnDelta("default", tableName, "new_col"), "new column comment");
        }
        finally {
            onTrino().executeQuery("DROP TABLE delta.default." + tableName);
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnDelta;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.getColumnCommentOnTrino;
import static io.trino.tests.product.hive.util.TemporaryHiveTable.randomTableSuffix;
import static io.trino.tests.product.utils.QueryExecutors.onDelta;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
Expand Down Expand Up @@ -248,16 +250,4 @@ public void testCreateTableWithColumnCommentOnDelta()
onDelta().executeQuery("DROP TABLE default." + tableName);
}
}

private static String getColumnCommentOnTrino(String schemaName, String tableName, String columnName)
{
QueryResult result = onTrino().executeQuery("SELECT comment FROM information_schema.columns WHERE table_schema = '" + schemaName + "' AND table_name = '" + tableName + "' AND column_name = '" + columnName + "'");
return (String) result.row(0).get(0);
}

private static String getColumnCommentOnDelta(String schemaName, String tableName, String columnName)
{
QueryResult result = onDelta().executeQuery(format("DESCRIBE %s.%s %s", schemaName, tableName, columnName));
return (String) result.row(2).get(1);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.deltalake.util;

import io.trino.tempto.query.QueryResult;

import static io.trino.tests.product.utils.QueryExecutors.onDelta;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;

/**
 * Shared helpers for Delta Lake product tests that need to read column
 * metadata through either the Trino or the Delta (Spark) query executor.
 */
public final class DeltaLakeTestUtils
{
    private DeltaLakeTestUtils() {}

    /**
     * Reads a column comment as Trino reports it, via {@code information_schema.columns}.
     * Returns {@code null} when the column has no comment.
     */
    public static String getColumnCommentOnTrino(String schemaName, String tableName, String columnName)
    {
        String query = "SELECT comment FROM information_schema.columns WHERE table_schema = '" + schemaName + "' AND table_name = '" + tableName + "' AND column_name = '" + columnName + "'";
        QueryResult queryResult = onTrino().executeQuery(query);
        return (String) queryResult.row(0).get(0);
    }

    /**
     * Reads a column comment as Spark/Delta reports it, via {@code DESCRIBE table column}.
     */
    public static String getColumnCommentOnDelta(String schemaName, String tableName, String columnName)
    {
        String describeQuery = format("DESCRIBE %s.%s %s", schemaName, tableName, columnName);
        QueryResult queryResult = onDelta().executeQuery(describeQuery);
        // DESCRIBE <table> <column> output: row 2 ("comment"), column 1 holds the value
        return (String) queryResult.row(2).get(1);
    }
}