Commit 48f5e05

Remove system.iceberg_tables table function

We decided to add a system table instead. This reverts commit 5ce80be.

1 parent 318ce93 commit 48f5e05
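
For context, the removed function was invoked through the table-function syntax, e.g. SELECT * FROM TABLE(iceberg.system.iceberg_tables(SCHEMA_NAME => '...')), as seen in the smoke test deleted below. Per the commit message, a system table is to take its place, which would be queried like an ordinary table. A hypothetical JDBC sketch of the two query shapes (the replacement table's name and columns are assumptions; this commit only performs the removal):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class IcebergTablesQuerySketch
{
    public static void main(String[] args)
            throws Exception
    {
        // Assumes a Trino server on localhost with the Iceberg connector mounted as catalog "iceberg"
        try (Connection connection = DriverManager.getConnection("jdbc:trino://localhost:8080/iceberg", "admin", null);
                Statement statement = connection.createStatement()) {
            // Old shape, removed by this commit (from the deleted smoke test):
            //   SELECT * FROM TABLE(iceberg.system.iceberg_tables(SCHEMA_NAME => 'my_schema'))

            // New shape implied by the commit message; the exact table name is an assumption:
            try (ResultSet resultSet = statement.executeQuery(
                    "SELECT table_schema, table_name FROM iceberg.system.iceberg_tables WHERE table_schema = 'my_schema'")) {
                while (resultSet.next()) {
                    System.out.println(resultSet.getString(1) + "." + resultSet.getString(2));
                }
            }
        }
    }
}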

File tree

10 files changed: +3 -316 lines

plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergModule.java

Lines changed: 1 addition & 4 deletions
@@ -42,7 +42,6 @@
 import io.trino.plugin.iceberg.functions.IcebergFunctionProvider;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionProcessorProviderFactory;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionProvider;
-import io.trino.plugin.iceberg.functions.tables.IcebergTablesFunctionProvider;
 import io.trino.plugin.iceberg.procedure.AddFilesTableFromTableProcedure;
 import io.trino.plugin.iceberg.procedure.AddFilesTableProcedure;
 import io.trino.plugin.iceberg.procedure.DropExtendedStatsTableProcedure;
@@ -138,9 +137,7 @@ public void configure(Binder binder)
         tableProcedures.addBinding().toProvider(AddFilesTableProcedure.class).in(Scopes.SINGLETON);
         tableProcedures.addBinding().toProvider(AddFilesTableFromTableProcedure.class).in(Scopes.SINGLETON);

-        Multibinder<ConnectorTableFunction> tableFunctions = newSetBinder(binder, ConnectorTableFunction.class);
-        tableFunctions.addBinding().toProvider(TableChangesFunctionProvider.class).in(Scopes.SINGLETON);
-        tableFunctions.addBinding().toProvider(IcebergTablesFunctionProvider.class).in(Scopes.SINGLETON);
+        newSetBinder(binder, ConnectorTableFunction.class).addBinding().toProvider(TableChangesFunctionProvider.class).in(Scopes.SINGLETON);
         binder.bind(FunctionProvider.class).to(IcebergFunctionProvider.class).in(Scopes.SINGLETON);
         binder.bind(TableChangesFunctionProcessorProviderFactory.class).in(Scopes.SINGLETON);
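
With a single table function left to register, the commit inlines the set binding instead of keeping the Multibinder in a local variable. A self-contained sketch of the same Guice idiom, using stand-in types rather than the Trino classes:

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;

import java.util.Set;

public class MultibinderSketch
{
    // Stand-in for ConnectorTableFunction
    interface TableFunction {}

    static class TableChangesFunction implements TableFunction {}

    // Stand-in for TableChangesFunctionProvider
    static class TableChangesProvider implements Provider<TableFunction>
    {
        @Override
        public TableFunction get()
        {
            return new TableChangesFunction();
        }
    }

    public static void main(String[] args)
    {
        Injector injector = Guice.createInjector(new AbstractModule()
        {
            @Override
            protected void configure()
            {
                // One binding, so the set binder need not be held in a local variable
                Multibinder.newSetBinder(binder(), TableFunction.class)
                        .addBinding()
                        .toProvider(TableChangesProvider.class)
                        .in(Scopes.SINGLETON);
            }
        });
        Set<TableFunction> functions = injector.getInstance(Key.get(new TypeLiteral<Set<TableFunction>>() {}));
        System.out.println(functions.size()); // prints 1
    }
}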

plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/IcebergSplitManager.java

Lines changed: 0 additions & 4 deletions
@@ -21,7 +21,6 @@
 import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorSplitSource;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionHandle;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesSplitSource;
-import io.trino.plugin.iceberg.functions.tables.IcebergTablesFunction.IcebergTables;
 import io.trino.spi.connector.ConnectorSession;
 import io.trino.spi.connector.ConnectorSplitManager;
 import io.trino.spi.connector.ConnectorSplitSource;
@@ -158,9 +157,6 @@ public ConnectorSplitSource getSplits(
                     .toSnapshot(functionHandle.endSnapshotId()));
             return new ClassLoaderSafeConnectorSplitSource(tableChangesSplitSource, IcebergSplitManager.class.getClassLoader());
         }
-        if (function instanceof IcebergTables icebergTables) {
-            return new ClassLoaderSafeConnectorSplitSource(new FixedSplitSource(icebergTables), IcebergSplitManager.class.getClassLoader());
-        }

         throw new IllegalStateException("Unknown table function: " + function);
     }
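
getSplits dispatches on the runtime type of the function handle, so removing the IcebergTables branch means any handle other than table_changes now falls through to the IllegalStateException. A stand-alone sketch of that dispatch shape (the handle types are stand-ins, not the Trino SPI classes):

import java.util.List;

public class SplitDispatchSketch
{
    // Stand-ins for the connector's table function handle types
    sealed interface FunctionHandle permits TableChangesHandle, TablesHandle {}

    record TableChangesHandle(long startSnapshotId, long endSnapshotId) implements FunctionHandle {}

    record TablesHandle(List<String> tables) implements FunctionHandle {}

    static String getSplits(FunctionHandle function)
    {
        // Pattern-matching instanceof (Java 16+) binds the handle in the condition
        if (function instanceof TableChangesHandle handle) {
            return "table_changes splits for snapshots " + handle.startSnapshotId() + ".." + handle.endSnapshotId();
        }
        // Before this commit, a second branch matched the tables handle and wrapped it
        // in a fixed single-split source; unknown handles now fail fast instead.
        throw new IllegalStateException("Unknown table function: " + function);
    }

    public static void main(String[] args)
    {
        System.out.println(getSplits(new TableChangesHandle(1, 5)));
    }
}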

plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/functions/IcebergFunctionProvider.java

Lines changed: 0 additions & 30 deletions
@@ -14,20 +14,12 @@
 package io.trino.plugin.iceberg.functions;

 import com.google.inject.Inject;
-import io.trino.plugin.base.classloader.ClassLoaderSafeTableFunctionProcessorProvider;
 import io.trino.plugin.base.classloader.ClassLoaderSafeTableFunctionProcessorProviderFactory;
-import io.trino.plugin.base.classloader.ClassLoaderSafeTableFunctionSplitProcessor;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionHandle;
 import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionProcessorProviderFactory;
-import io.trino.plugin.iceberg.functions.tables.IcebergTablesFunction;
-import io.trino.spi.classloader.ThreadContextClassLoader;
-import io.trino.spi.connector.ConnectorSession;
-import io.trino.spi.connector.ConnectorSplit;
 import io.trino.spi.function.FunctionProvider;
 import io.trino.spi.function.table.ConnectorTableFunctionHandle;
-import io.trino.spi.function.table.TableFunctionProcessorProvider;
 import io.trino.spi.function.table.TableFunctionProcessorProviderFactory;
-import io.trino.spi.function.table.TableFunctionSplitProcessor;

 import static java.util.Objects.requireNonNull;
@@ -48,28 +40,6 @@ public TableFunctionProcessorProviderFactory getTableFunctionProcessorProviderFactory
         if (functionHandle instanceof TableChangesFunctionHandle) {
             return new ClassLoaderSafeTableFunctionProcessorProviderFactory(tableChangesFunctionProcessorProviderFactory, getClass().getClassLoader());
         }
-        if (functionHandle instanceof IcebergTablesFunction.IcebergTables) {
-            ClassLoader classLoader = getClass().getClassLoader();
-            return new TableFunctionProcessorProviderFactory()
-            {
-                @Override
-                public TableFunctionProcessorProvider createTableFunctionProcessorProvider()
-                {
-                    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
-                        return new ClassLoaderSafeTableFunctionProcessorProvider(new TableFunctionProcessorProvider()
-                        {
-                            @Override
-                            public TableFunctionSplitProcessor getSplitProcessor(ConnectorSession session, ConnectorTableFunctionHandle handle, ConnectorSplit split)
-                            {
-                                return new ClassLoaderSafeTableFunctionSplitProcessor(
-                                        new IcebergTablesFunction.IcebergTablesProcessor(((IcebergTablesFunction.IcebergTables) split).tables()),
-                                        getClass().getClassLoader());
-                            }
-                        }, classLoader);
-                    }
-                }
-            };
-        }

         throw new UnsupportedOperationException("Unsupported function: " + functionHandle);
     }
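
The main subtlety in the deleted branch is class-loader hygiene: the processor provider was constructed inside a ThreadContextClassLoader scope so that classes resolved against the plugin's class loader rather than the engine's. A minimal stand-in for that utility (the real one is io.trino.spi.classloader.ThreadContextClassLoader), showing the swap-and-restore pattern:

public class ThreadContextClassLoaderSketch
{
    // Swap the thread context class loader for a scope; restore it on close
    static final class ThreadContextClassLoader
            implements AutoCloseable
    {
        private final ClassLoader original;

        ThreadContextClassLoader(ClassLoader classLoader)
        {
            this.original = Thread.currentThread().getContextClassLoader();
            Thread.currentThread().setContextClassLoader(classLoader);
        }

        @Override
        public void close()
        {
            Thread.currentThread().setContextClassLoader(original);
        }
    }

    public static void main(String[] args)
    {
        ClassLoader pluginClassLoader = ThreadContextClassLoaderSketch.class.getClassLoader();
        // The deleted factory created its TableFunctionProcessorProvider inside this scope
        try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(pluginClassLoader)) {
            System.out.println(Thread.currentThread().getContextClassLoader());
        }
    }
}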

plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/functions/tables/IcebergTablesFunction.java

Lines changed: 0 additions & 129 deletions
This file was deleted.

plugin/trino-iceberg/src/main/java/io/trino/plugin/iceberg/functions/tables/IcebergTablesFunctionProvider.java

Lines changed: 0 additions & 42 deletions
This file was deleted.

plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergConnectorSmokeTest.java

Lines changed: 0 additions & 52 deletions
@@ -22,7 +22,6 @@
 import io.trino.filesystem.TrinoFileSystem;
 import io.trino.plugin.iceberg.fileio.ForwardingFileIo;
 import io.trino.testing.BaseConnectorSmokeTest;
-import io.trino.testing.QueryRunner;
 import io.trino.testing.TestingConnectorBehavior;
 import io.trino.testing.sql.TestTable;
 import org.apache.iceberg.FileFormat;
@@ -71,7 +70,6 @@
 import static java.util.Objects.requireNonNull;
 import static java.util.concurrent.Executors.newFixedThreadPool;
 import static java.util.concurrent.TimeUnit.SECONDS;
-import static java.util.stream.Collectors.joining;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
@@ -838,56 +836,6 @@ public void testCreateOrReplaceWithTableChangesFunction()
         }
     }

-    @Test
-    public void testIcebergTablesFunction()
-            throws Exception
-    {
-        String schemaName = getSession().getSchema().orElseThrow();
-        String firstSchema = "first_schema_" + randomNameSuffix();
-        String secondSchema = "second_schema_" + randomNameSuffix();
-        String firstSchemaLocation = schemaPath().replaceAll(schemaName, firstSchema);
-        String secondSchemaLocation = schemaPath().replaceAll(schemaName, secondSchema);
-        assertQuerySucceeds("CREATE SCHEMA " + firstSchema + " WITH (location = '%s')".formatted(firstSchemaLocation));
-        assertQuerySucceeds("CREATE SCHEMA " + secondSchema + " WITH (location = '%s')".formatted(secondSchemaLocation));
-        QueryRunner queryRunner = getQueryRunner();
-        Session firstSchemaSession = Session.builder(queryRunner.getDefaultSession()).setSchema(firstSchema).build();
-        Session secondSchemaSession = Session.builder(queryRunner.getDefaultSession()).setSchema(secondSchema).build();
-
-        try (TestTable _ = new TestTable(
-                sql -> getQueryRunner().execute(firstSchemaSession, sql),
-                "first_schema_table1_",
-                "(id int)");
-                TestTable _ = new TestTable(
-                        sql -> getQueryRunner().execute(firstSchemaSession, sql),
-                        "first_schema_table2_",
-                        "(id int)");
-                TestTable secondSchemaTable = new TestTable(
-                        sql -> queryRunner.execute(secondSchemaSession, sql),
-                        "second_schema_table_",
-                        "(id int)");
-                AutoCloseable _ = createAdditionalTables(firstSchema)) {
-            String firstSchemaTablesValues = "VALUES " + getQueryRunner()
-                    .execute("SELECT table_schema, table_name FROM iceberg.information_schema.tables WHERE table_schema='%s'".formatted(firstSchema))
-                    .getMaterializedRows().stream()
-                    .map(row -> "('%s', '%s')".formatted(row.getField(0), row.getField(1)))
-                    .collect(joining(", "));
-            String bothSchemasTablesValues = firstSchemaTablesValues + ", ('%s', '%s')".formatted(secondSchema, secondSchemaTable.getName());
-            assertQuery("SELECT * FROM TABLE(iceberg.system.iceberg_tables(SCHEMA_NAME => '%s'))".formatted(firstSchema), firstSchemaTablesValues);
-            assertQuery("SELECT * FROM TABLE(iceberg.system.iceberg_tables(null)) WHERE table_schema = '%s'".formatted(firstSchema), firstSchemaTablesValues);
-            assertQuery("SELECT * FROM TABLE(iceberg.system.iceberg_tables()) WHERE table_schema in ('%s', '%s')".formatted(firstSchema, secondSchema), bothSchemasTablesValues);
-            assertQuery("SELECT * FROM TABLE(iceberg.system.iceberg_tables(null)) WHERE table_schema in ('%s', '%s')".formatted(firstSchema, secondSchema), bothSchemasTablesValues);
-        }
-        finally {
-            assertQuerySucceeds("DROP SCHEMA " + firstSchema);
-            assertQuerySucceeds("DROP SCHEMA " + secondSchema);
-        }
-    }
-
-    protected AutoCloseable createAdditionalTables(String schema)
-    {
-        return () -> {};
-    }
-
     @Test
     public void testMetadataDeleteAfterCommitEnabled()
             throws IOException
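
Aside: the deleted test binds several try-with-resources entries to the unnamed variable _ (JEP 456, finalized in Java 22), which keeps a resource open for the block without introducing a usable name. A minimal sketch of the idiom:

public class UnnamedVariableSketch
{
    record Resource(String name)
            implements AutoCloseable
    {
        @Override
        public void close()
        {
            System.out.println("closed " + name);
        }
    }

    public static void main(String[] args)
    {
        // `_` holds "first" open for the block; only "second" gets a usable name
        try (Resource _ = new Resource("first"); Resource second = new Resource("second")) {
            System.out.println("using " + second.name());
        }
    }
}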

plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java

Lines changed: 0 additions & 30 deletions
@@ -13,14 +13,10 @@
  */
 package io.trino.plugin.iceberg;

-import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import io.minio.messages.Event;
 import io.trino.Session;
-import io.trino.metastore.Column;
 import io.trino.metastore.HiveMetastore;
-import io.trino.metastore.HiveType;
-import io.trino.metastore.Table;
 import io.trino.plugin.hive.containers.Hive3MinioDataLake;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.testing.QueryRunner;
@@ -32,25 +28,18 @@

 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;

 import static com.google.common.collect.ImmutableList.toImmutableList;
 import static com.google.common.collect.ImmutableSet.toImmutableSet;
-import static io.trino.metastore.PrincipalPrivileges.NO_PRIVILEGES;
-import static io.trino.plugin.hive.TableType.EXTERNAL_TABLE;
 import static io.trino.plugin.hive.TestingThriftHiveMetastoreBuilder.testingThriftHiveMetastoreBuilder;
-import static io.trino.plugin.iceberg.IcebergTestUtils.getHiveMetastore;
-import static io.trino.plugin.iceberg.catalog.AbstractIcebergTableOperations.ICEBERG_METASTORE_STORAGE_FORMAT;
 import static io.trino.testing.TestingNames.randomNameSuffix;
 import static io.trino.testing.containers.Minio.MINIO_ACCESS_KEY;
 import static io.trino.testing.containers.Minio.MINIO_REGION;
 import static io.trino.testing.containers.Minio.MINIO_SECRET_KEY;
 import static java.lang.String.format;
 import static java.util.Locale.ENGLISH;
-import static org.apache.iceberg.BaseMetastoreTableOperations.ICEBERG_TABLE_TYPE_VALUE;
-import static org.apache.iceberg.BaseMetastoreTableOperations.TABLE_TYPE_PROP;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.parallel.ExecutionMode.SAME_THREAD;
@@ -245,25 +234,6 @@ public void testPathContainsSpecialCharacter()
         assertUpdate("DROP TABLE " + tableName);
     }

-    @Override
-    protected AutoCloseable createAdditionalTables(String schema)
-    {
-        HiveMetastore metastore = getHiveMetastore(getQueryRunner());
-        // simulate iceberg table created by spark with lowercase table type
-        Table lowerCaseTableType = io.trino.metastore.Table.builder()
-                .setDatabaseName(schema)
-                .setTableName("lowercase_type_" + randomNameSuffix())
-                .setOwner(Optional.empty())
-                .setDataColumns(ImmutableList.of(new Column("id", HiveType.HIVE_STRING, Optional.empty(), ImmutableMap.of())))
-                .setTableType(EXTERNAL_TABLE.name())
-                .withStorage(storage -> storage.setStorageFormat(ICEBERG_METASTORE_STORAGE_FORMAT))
-                .setParameter("EXTERNAL", "TRUE")
-                .setParameter(TABLE_TYPE_PROP, ICEBERG_TABLE_TYPE_VALUE.toLowerCase(ENGLISH))
-                .build();
-        metastore.createTable(lowerCaseTableType, NO_PRIVILEGES);
-        return () -> metastore.dropTable(lowerCaseTableType.getDatabaseName(), lowerCaseTableType.getTableName(), true);
-    }
-
 private String onMetastore(@Language("SQL") String sql)
 {
     return hiveMinioDataLake.getHiveHadoop().runOnMetastore(sql);
