Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
*/
package io.trino.plugin.lakehouse;

import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import io.airlift.bootstrap.LifeCycleManager;
import io.trino.plugin.hive.HiveSchemaProperties;
Expand All @@ -26,10 +27,13 @@
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.function.FunctionProvider;
import io.trino.spi.function.table.ConnectorTableFunction;
import io.trino.spi.session.PropertyMetadata;
import io.trino.spi.transaction.IsolationLevel;

import java.util.List;
import java.util.Optional;
import java.util.Set;

import static com.google.common.collect.Sets.immutableEnumSet;
Expand All @@ -51,6 +55,8 @@ public class LakehouseConnector
private final LakehouseSessionProperties sessionProperties;
private final LakehouseTableProperties tableProperties;
private final IcebergMaterializedViewProperties materializedViewProperties;
private final Set<ConnectorTableFunction> tableFunctions;
private final FunctionProvider functionProvider;

@Inject
public LakehouseConnector(
Expand All @@ -62,7 +68,9 @@ public LakehouseConnector(
LakehouseNodePartitioningProvider nodePartitioningProvider,
LakehouseSessionProperties sessionProperties,
LakehouseTableProperties tableProperties,
IcebergMaterializedViewProperties materializedViewProperties)
IcebergMaterializedViewProperties materializedViewProperties,
Set<ConnectorTableFunction> tableFunctions,
FunctionProvider functionProvider)
{
this.lifeCycleManager = requireNonNull(lifeCycleManager, "lifeCycleManager is null");
this.transactionManager = requireNonNull(transactionManager, "transactionManager is null");
Expand All @@ -73,6 +81,8 @@ public LakehouseConnector(
this.sessionProperties = requireNonNull(sessionProperties, "sessionProperties is null");
this.tableProperties = requireNonNull(tableProperties, "tableProperties is null");
this.materializedViewProperties = requireNonNull(materializedViewProperties, "materializedViewProperties is null");
this.tableFunctions = ImmutableSet.copyOf(requireNonNull(tableFunctions, "tableFunctions is null"));
this.functionProvider = requireNonNull(functionProvider, "functionProvider is null");
}

@Override
Expand Down Expand Up @@ -159,4 +169,16 @@ public Set<ConnectorCapabilities> getCapabilities()
{
return immutableEnumSet(NOT_NULL_COLUMN_CONSTRAINT, MATERIALIZED_VIEW_GRACE_PERIOD);
}

/**
 * Returns the table functions contributed via the Guice set binder
 * (injected in the constructor), e.g. {@code system.table_changes}.
 */
@Override
public Set<ConnectorTableFunction> getTableFunctions()
{
return tableFunctions;
}

/**
 * Returns the provider that resolves processor factories for the
 * table functions exposed by this connector. Always present.
 */
@Override
public Optional<FunctionProvider> getFunctionProvider()
{
return Optional.of(functionProvider);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import io.trino.plugin.deltalake.DeltaLakeConfig;
import io.trino.plugin.deltalake.DeltaLakeExecutorModule;
import io.trino.plugin.deltalake.DeltaLakeFileSystemFactory;
import io.trino.plugin.deltalake.DeltaLakeFunctionProvider;
import io.trino.plugin.deltalake.DeltaLakeMergeResult;
import io.trino.plugin.deltalake.DeltaLakeMetadataFactory;
import io.trino.plugin.deltalake.DeltaLakeNodePartitioningProvider;
Expand All @@ -32,6 +33,7 @@
import io.trino.plugin.deltalake.DeltaLakeTableProperties;
import io.trino.plugin.deltalake.DeltaLakeTransactionManager;
import io.trino.plugin.deltalake.DeltaLakeWriterStats;
import io.trino.plugin.deltalake.functions.tablechanges.TableChangesProcessorProvider;
import io.trino.plugin.deltalake.metastore.DeltaLakeMetastoreModule;
import io.trino.plugin.deltalake.metastore.DeltaLakeTableMetadataScheduler;
import io.trino.plugin.deltalake.metastore.NoOpVendedCredentialsProvider;
Expand Down Expand Up @@ -103,6 +105,9 @@ protected void setup(Binder binder)
binder.bind(DeltaLakeTableMetadataScheduler.class).in(Scopes.SINGLETON);
newExporter(binder).export(DeltaLakeTableMetadataScheduler.class).withGeneratedName();

binder.bind(TableChangesProcessorProvider.class).in(Scopes.SINGLETON);
binder.bind(DeltaLakeFunctionProvider.class).in(Scopes.SINGLETON);

jsonCodecBinder(binder).bindJsonCodec(DataFileInfo.class);
jsonCodecBinder(binder).bindJsonCodec(DeltaLakeMergeResult.class);
jsonCodecBinder(binder).bindJsonCodec(ExtendedStatistics.class);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.lakehouse;

import com.google.inject.Inject;
import io.trino.plugin.deltalake.DeltaLakeFunctionProvider;
import io.trino.plugin.deltalake.functions.tablechanges.TableChangesTableFunctionHandle;
import io.trino.plugin.iceberg.functions.IcebergFunctionProvider;
import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionHandle;
import io.trino.spi.function.FunctionProvider;
import io.trino.spi.function.table.ConnectorTableFunctionHandle;
import io.trino.spi.function.table.TableFunctionProcessorProviderFactory;

import static java.util.Objects.requireNonNull;

/**
 * Routes table function handles to the underlying Delta Lake or Iceberg
 * function provider, based on the concrete handle type.
 */
public class LakehouseFunctionProvider
        implements FunctionProvider
{
    private final DeltaLakeFunctionProvider deltaLakeFunctionProvider;
    private final IcebergFunctionProvider icebergFunctionProvider;

    @Inject
    public LakehouseFunctionProvider(
            DeltaLakeFunctionProvider deltaLakeFunctionProvider,
            IcebergFunctionProvider icebergFunctionProvider)
    {
        this.deltaLakeFunctionProvider = requireNonNull(deltaLakeFunctionProvider, "deltaLakeFunctionProvider is null");
        this.icebergFunctionProvider = requireNonNull(icebergFunctionProvider, "icebergFunctionProvider is null");
    }

    @Override
    public TableFunctionProcessorProviderFactory getTableFunctionProcessorProviderFactory(ConnectorTableFunctionHandle functionHandle)
    {
        // Dispatch on the concrete handle type; each delegate understands only its own handles.
        return switch (functionHandle) {
            case TableChangesTableFunctionHandle handle -> deltaLakeFunctionProvider.getTableFunctionProcessorProviderFactory(handle);
            case TableChangesFunctionHandle handle -> icebergFunctionProvider.getTableFunctionProcessorProviderFactory(handle);
            // 'case null' keeps the original behavior: a null handle falls through to the
            // UnsupportedOperationException instead of a NullPointerException from the switch.
            case null, default -> throw new UnsupportedOperationException("Unsupported function: " + functionHandle);
        };
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import io.trino.plugin.iceberg.catalog.glue.IcebergGlueCatalogModule;
import io.trino.plugin.iceberg.catalog.hms.IcebergHiveMetastoreCatalogModule;
import io.trino.plugin.iceberg.fileio.ForwardingFileIoFactory;
import io.trino.plugin.iceberg.functions.IcebergFunctionProvider;

import static com.google.inject.multibindings.OptionalBinder.newOptionalBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
Expand Down Expand Up @@ -75,6 +76,8 @@ protected void setup(Binder binder)

binder.bind(ForwardingFileIoFactory.class).in(Scopes.SINGLETON);

binder.bind(IcebergFunctionProvider.class).in(Scopes.SINGLETON);

install(switch (buildConfigObject(MetastoreTypeConfig.class).getMetastoreType()) {
case THRIFT -> new IcebergHiveMetastoreCatalogModule();
case FILE -> new IcebergFileMetastoreCatalogModule();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,12 @@
import io.trino.plugin.hive.orc.OrcWriterConfig;
import io.trino.plugin.hive.parquet.ParquetReaderConfig;
import io.trino.plugin.hive.parquet.ParquetWriterConfig;
import io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunctionProcessorProviderFactory;
import io.trino.plugin.lakehouse.functions.tablechanges.TableChangesFunctionProvider;
import io.trino.spi.function.FunctionProvider;
import io.trino.spi.function.table.ConnectorTableFunction;

import static com.google.inject.multibindings.Multibinder.newSetBinder;
import static io.airlift.configuration.ConfigBinder.configBinder;
import static org.weakref.jmx.guice.ExportBinder.newExporter;

Expand Down Expand Up @@ -53,6 +58,10 @@ protected void setup(Binder binder)
binder.bind(FileFormatDataSourceStats.class).in(Scopes.SINGLETON);
newExporter(binder).export(FileFormatDataSourceStats.class).withGeneratedName();

newSetBinder(binder, ConnectorTableFunction.class).addBinding().toProvider(TableChangesFunctionProvider.class).in(Scopes.SINGLETON);
binder.bind(FunctionProvider.class).to(LakehouseFunctionProvider.class).in(Scopes.SINGLETON);
binder.bind(TableChangesFunctionProcessorProviderFactory.class).in(Scopes.SINGLETON);

binder.bind(Key.get(boolean.class, HideDeltaLakeTables.class)).toInstance(false);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.lakehouse.functions.tablechanges;

import com.google.common.collect.ImmutableList;
import io.trino.plugin.deltalake.metastore.NotADeltaLakeTableException;
import io.trino.plugin.iceberg.UnknownTableTypeException;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.ConnectorAccessControl;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.function.table.AbstractConnectorTableFunction;
import io.trino.spi.function.table.Argument;
import io.trino.spi.function.table.ScalarArgument;
import io.trino.spi.function.table.ScalarArgumentSpecification;
import io.trino.spi.function.table.TableFunctionAnalysis;

import java.util.Map;

import static io.trino.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.trino.spi.function.table.ReturnTypeSpecification.GenericTable.GENERIC_TABLE;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.util.Objects.requireNonNull;

/**
 * A {@code system.table_changes} table function for the lakehouse connector.
 * Dispatches analysis to the Delta Lake implementation first, and falls back to
 * the Iceberg implementation when the target is not a Delta Lake table.
 */
public class TableChangesFunction
        extends AbstractConnectorTableFunction
{
    private static final String FUNCTION_NAME = "table_changes";
    private static final String SCHEMA_NAME = "system";

    public static final String SCHEMA_NAME_ARGUMENT = "SCHEMA_NAME";
    private static final String TABLE_NAME_ARGUMENT = "TABLE_NAME";
    // Required only when the target turns out to be an Iceberg table
    private static final String START_SNAPSHOT_VAR_NAME = "START_SNAPSHOT_ID";
    private static final String END_SNAPSHOT_VAR_NAME = "END_SNAPSHOT_ID";
    // Used only by the Delta Lake implementation
    private static final String SINCE_VERSION_ARGUMENT = "SINCE_VERSION";

    private final io.trino.plugin.deltalake.functions.tablechanges.TableChangesFunction deltaLakeTableChangesFunction;
    private final io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunction icebergTableChangesFunction;

    public TableChangesFunction(
            io.trino.plugin.deltalake.functions.tablechanges.TableChangesFunction deltaLakeTableChangesFunction,
            io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunction icebergTableChangesFunction)
    {
        super(
                SCHEMA_NAME,
                FUNCTION_NAME,
                ImmutableList.of(
                        ScalarArgumentSpecification.builder().name(SCHEMA_NAME_ARGUMENT).type(VARCHAR).build(),
                        ScalarArgumentSpecification.builder().name(TABLE_NAME_ARGUMENT).type(VARCHAR).build(),
                        ScalarArgumentSpecification.builder().name(START_SNAPSHOT_VAR_NAME).type(BIGINT).defaultValue(null).build(),
                        ScalarArgumentSpecification.builder().name(END_SNAPSHOT_VAR_NAME).type(BIGINT).defaultValue(null).build(),
                        ScalarArgumentSpecification.builder().name(SINCE_VERSION_ARGUMENT).type(BIGINT).defaultValue(null).build()),
                GENERIC_TABLE);
        this.deltaLakeTableChangesFunction = requireNonNull(deltaLakeTableChangesFunction, "deltaLakeTableChangesFunction is null");
        this.icebergTableChangesFunction = requireNonNull(icebergTableChangesFunction, "icebergTableChangesFunction is null");
    }

    /**
     * Analyzes the invocation by delegating to Delta Lake first; when the table is
     * not a Delta Lake table, validates the Iceberg-required snapshot bounds and
     * delegates to Iceberg.
     *
     * @throws TrinoException with {@code INVALID_FUNCTION_ARGUMENT} when a required
     *         snapshot bound is missing or the table type supports neither implementation
     */
    @Override
    public TableFunctionAnalysis analyze(
            ConnectorSession session,
            ConnectorTransactionHandle transaction,
            Map<String, Argument> arguments,
            ConnectorAccessControl accessControl)
    {
        try {
            return deltaLakeTableChangesFunction.analyze(session, transaction, arguments, accessControl);
        }
        catch (NotADeltaLakeTableException _) {
            // The Iceberg variant requires explicit snapshot bounds; fail early with a
            // clear message instead of a confusing downstream error.
            checkScalarNonNull(arguments, START_SNAPSHOT_VAR_NAME);
            checkScalarNonNull(arguments, END_SNAPSHOT_VAR_NAME);
            try {
                return icebergTableChangesFunction.analyze(session, transaction, arguments, accessControl);
            }
            catch (UnknownTableTypeException e) {
                // Keep the cause attached so the original failure is not lost
                throw new TrinoException(INVALID_FUNCTION_ARGUMENT, "table_changes function is not supported for the given table type", e);
            }
        }
    }

    private static void checkScalarNonNull(Map<String, Argument> arguments, String argumentName)
    {
        // NOTE(review): declared arguments with defaultValue(null) appear to be materialized
        // in the map as a ScalarArgument carrying a null value, so checking map presence
        // alone would never fire — test the scalar value instead. Confirm against the
        // engine's table function argument analysis.
        if (!(arguments.get(argumentName) instanceof ScalarArgument scalarArgument) || scalarArgument.getValue() == null) {
            throw new TrinoException(INVALID_FUNCTION_ARGUMENT, FUNCTION_NAME + " argument " + argumentName + " may not be null");
        }
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.lakehouse.functions.tablechanges;

import com.google.inject.Inject;
import com.google.inject.Provider;
import io.trino.plugin.base.classloader.ClassLoaderSafeConnectorTableFunction;
import io.trino.plugin.deltalake.DeltaLakeMetadataFactory;
import io.trino.plugin.iceberg.catalog.TrinoCatalogFactory;
import io.trino.spi.function.table.ConnectorTableFunction;
import io.trino.spi.type.TypeManager;

import static java.util.Objects.requireNonNull;

/**
 * Guice provider that assembles the lakehouse {@code table_changes} table function,
 * combining the Delta Lake and Iceberg implementations and isolating the result
 * behind the plugin classloader.
 */
public class TableChangesFunctionProvider
        implements Provider<ConnectorTableFunction>
{
    private final DeltaLakeMetadataFactory deltaLakeMetadataFactory;
    private final TrinoCatalogFactory trinoCatalogFactory;
    private final TypeManager typeManager;

    @Inject
    public TableChangesFunctionProvider(
            DeltaLakeMetadataFactory deltaLakeMetadataFactory,
            TrinoCatalogFactory trinoCatalogFactory,
            TypeManager typeManager)
    {
        this.deltaLakeMetadataFactory = requireNonNull(deltaLakeMetadataFactory, "deltaLakeMetadataFactory is null");
        this.trinoCatalogFactory = requireNonNull(trinoCatalogFactory, "trinoCatalogFactory is null");
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
    }

    @Override
    public ConnectorTableFunction get()
    {
        // Build the combined function from both underlying implementations; the
        // fully-qualified names disambiguate the two same-named delegate classes.
        return new ClassLoaderSafeConnectorTableFunction(
                new TableChangesFunction(
                        new io.trino.plugin.deltalake.functions.tablechanges.TableChangesFunction(deltaLakeMetadataFactory),
                        new io.trino.plugin.iceberg.functions.tablechanges.TableChangesFunction(trinoCatalogFactory, typeManager)),
                getClass().getClassLoader());
    }
}
Loading
Loading