Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,6 @@
import static com.google.common.base.Preconditions.checkState;
import static io.trino.operator.aggregation.TypedSet.createDistinctTypedSet;
import static io.trino.spi.StandardErrorCode.INVALID_CAST_ARGUMENT;
import static io.trino.spi.block.MethodHandleUtil.compose;
import static io.trino.spi.block.MethodHandleUtil.nativeValueWriter;
import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION;
import static io.trino.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL;
import static io.trino.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL;
Expand All @@ -53,6 +51,8 @@
import static io.trino.spi.type.TypeSignature.mapType;
import static io.trino.util.Failures.internalError;
import static io.trino.util.Reflection.methodHandle;
import static java.lang.invoke.MethodHandles.dropArguments;
import static java.lang.invoke.MethodHandles.foldArguments;
import static java.lang.invoke.MethodHandles.permuteArguments;
import static java.lang.invoke.MethodType.methodType;
import static java.util.Objects.requireNonNull;
Expand All @@ -77,6 +77,11 @@ public final class MapToMapCast
private static final MethodHandle CHECK_SLICE_IS_NOT_NULL = methodHandle(MapToMapCast.class, "checkSliceIsNotNull", Slice.class);
private static final MethodHandle CHECK_BLOCK_IS_NOT_NULL = methodHandle(MapToMapCast.class, "checkBlockIsNotNull", Block.class);

// Virtual handles for the Type#writeXxx methods, each with signature (Type, BlockBuilder, T)void;
// nativeValueWriter binds the Type receiver to produce a (BlockBuilder, T)void writer.
private static final MethodHandle WRITE_LONG = methodHandle(Type.class, "writeLong", BlockBuilder.class, long.class);
private static final MethodHandle WRITE_DOUBLE = methodHandle(Type.class, "writeDouble", BlockBuilder.class, double.class);
private static final MethodHandle WRITE_BOOLEAN = methodHandle(Type.class, "writeBoolean", BlockBuilder.class, boolean.class);
private static final MethodHandle WRITE_OBJECT = methodHandle(Type.class, "writeObject", BlockBuilder.class, Object.class);

private final BlockTypeOperators blockTypeOperators;

public MapToMapCast(BlockTypeOperators blockTypeOperators)
Expand Down Expand Up @@ -136,14 +141,15 @@ private MethodHandle buildProcessor(FunctionDependencies functionDependencies, T
MethodHandle cast = functionDependencies.getCastInvoker(fromType, toType, invocationConvention).getMethodHandle();
// Normalize cast to have connector session as first argument
if (cast.type().parameterArray()[0] != ConnectorSession.class) {
cast = MethodHandles.dropArguments(cast, 0, ConnectorSession.class);
cast = dropArguments(cast, 0, ConnectorSession.class);
}
// Change cast signature to (Block.class, int.class, ConnectorSession.class):T
cast = permuteArguments(cast, methodType(cast.type().returnType(), Block.class, int.class, ConnectorSession.class), 2, 0, 1);

// If the key cast function is nullable, check the result is not null
if (isKey && functionNullability.isReturnNullable()) {
cast = compose(nullChecker(cast.type().returnType()), cast);
MethodHandle target = nullChecker(cast.type().returnType());
cast = foldArguments(dropArguments(target, 1, cast.type().parameterList()), cast);
}

// get write method with signature: (T, BlockBuilder.class):void
Expand All @@ -153,7 +159,7 @@ private MethodHandle buildProcessor(FunctionDependencies functionDependencies, T
// ensure cast returns type expected by the writer
cast = cast.asType(methodType(writer.type().parameterType(0), cast.type().parameterArray()));

return compose(writer, cast);
return foldArguments(dropArguments(writer, 1, cast.type().parameterList()), cast);
}

/**
Expand Down Expand Up @@ -287,4 +293,28 @@ public static Block mapCast(
mapBlockBuilder.closeEntry();
return (Block) targetType.getObject(mapBlockBuilder, mapBlockBuilder.getPositionCount() - 1);
}

/**
 * Returns a {@link MethodHandle} that writes a native value into a {@link BlockBuilder}
 * via the {@code Type#writeXxx} method matching the type's java stack type. The handle is
 * bound to {@code type}, so its signature is {@code (BlockBuilder, T)void}.
 *
 * @throws IllegalArgumentException if the type's java type is an unsupported primitive
 */
public static MethodHandle nativeValueWriter(Type type)
{
    Class<?> stackType = type.getJavaType();
    if (stackType == long.class) {
        return WRITE_LONG.bindTo(type);
    }
    if (stackType == double.class) {
        return WRITE_DOUBLE.bindTo(type);
    }
    if (stackType == boolean.class) {
        return WRITE_BOOLEAN.bindTo(type);
    }
    if (!stackType.isPrimitive()) {
        // NOTE(review): every non-primitive stack type (including Slice-backed types) is routed
        // through Type#writeObject here — confirm writeObject accepts all such types.
        return WRITE_OBJECT.bindTo(type);
    }
    throw new IllegalArgumentException("Unknown java type " + stackType + " from type " + type);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,14 +49,14 @@

import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.trino.SessionTestUtils.TEST_SESSION;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.sql.planner.TestingPlannerContext.plannerContextBuilder;
import static io.trino.testing.TestingAccessControlManager.TestingPrivilegeType.INSERT_TABLE;
import static io.trino.testing.TestingAccessControlManager.privilege;
import static io.trino.testing.TestingEventListenerManager.emptyEventListenerManager;
import static io.trino.testing.TestingHandles.TEST_CATALOG_HANDLE;
import static io.trino.testing.TestingHandles.TEST_CATALOG_NAME;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static io.trino.util.Reflection.methodHandle;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static java.lang.String.format;

@Deprecated
public final class MethodHandleUtil
{
private static final MethodHandle WRITE_LONG = methodHandle(Type.class, "writeLong", BlockBuilder.class, long.class);
Expand All @@ -40,6 +41,7 @@ private MethodHandleUtil()
* @param g (T1, T2, ..., Tn)U
* @return (T1, T2, ..., Tn, S1, S2, ..., Sm)R
*/
@Deprecated
public static MethodHandle compose(MethodHandle f, MethodHandle g)
{
if (f.type().parameterType(0) != g.type().returnType()) {
Expand All @@ -59,6 +61,7 @@ public static MethodHandle compose(MethodHandle f, MethodHandle g)
* @param h (T1, T2, ..., Tn)V
* @return (S1, S2, ..., Sm, T1, T2, ..., Tn)R
*/
@Deprecated
public static MethodHandle compose(MethodHandle f, MethodHandle g, MethodHandle h)
{
if (f.type().parameterCount() != 2) {
Expand Down Expand Up @@ -106,6 +109,7 @@ public static MethodHandle compose(MethodHandle f, MethodHandle g, MethodHandle
* method in a tight loop can create significant GC pressure and significantly increase
* application pause time.
*/
@Deprecated
public static MethodHandle methodHandle(Class<?> clazz, String name, Class<?>... parameterTypes)
{
try {
Expand All @@ -116,6 +120,7 @@ public static MethodHandle methodHandle(Class<?> clazz, String name, Class<?>...
}
}

@Deprecated
public static MethodHandle nativeValueWriter(Type type)
{
Class<?> javaType = type.getJavaType();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,15 +24,22 @@
import java.lang.invoke.MethodHandle;
import java.util.Optional;

import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;

public class FlushJdbcMetadataCacheProcedure
implements Provider<Procedure>
{
private static final MethodHandle FLUSH_JDBC_METADATA_CACHE = methodHandle(
FlushJdbcMetadataCacheProcedure.class,
"flushMetadataCache");
// Handle to the public no-arg flushMetadataCache() method, resolved once at class load.
private static final MethodHandle FLUSH_JDBC_METADATA_CACHE;

static {
try {
FLUSH_JDBC_METADATA_CACHE = lookup().unreflect(FlushJdbcMetadataCacheProcedure.class.getMethod("flushMetadataCache"));
}
catch (ReflectiveOperationException e) {
// The method is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final CachingJdbcClient cachingJdbcClient;
private final Optional<CachingIdentifierMapping> cachingIdentifierMapping;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,22 +31,24 @@

import static io.trino.plugin.deltalake.procedure.Procedures.checkProcedureArgument;
import static io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;

public class DropExtendedStatsProcedure
implements Provider<Procedure>
{
private static final MethodHandle PROCEDURE_METHOD = methodHandle(
DropExtendedStatsProcedure.class,
"dropStats",
ConnectorSession.class,
ConnectorAccessControl.class,
// Schema name and table name
String.class,
String.class);
// Handle to the public dropStats(...) method, resolved once at class load.
// The two String parameters are the schema name and the table name.
private static final MethodHandle PROCEDURE_METHOD;

static {
try {
PROCEDURE_METHOD = lookup().unreflect(DropExtendedStatsProcedure.class.getMethod("dropStats", ConnectorSession.class, ConnectorAccessControl.class, String.class, String.class));
}
catch (ReflectiveOperationException e) {
// dropStats is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final DeltaLakeMetadataFactory metadataFactory;
private final ExtendedStatisticsAccess statsAccess;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,9 @@
import static io.trino.plugin.deltalake.transactionlog.TransactionLogUtil.TRANSACTION_LOG_DIRECTORY;
import static io.trino.plugin.deltalake.transactionlog.TransactionLogUtil.getTransactionLogDir;
import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Comparator.naturalOrder;
import static java.util.Objects.requireNonNull;

Expand All @@ -70,14 +70,16 @@ public class VacuumProcedure
{
private static final Logger log = Logger.get(VacuumProcedure.class);

private static final MethodHandle VACUUM = methodHandle(
VacuumProcedure.class,
"vacuum",
ConnectorSession.class,
ConnectorAccessControl.class,
String.class,
String.class,
String.class);
// Handle to the public vacuum(...) method, resolved once at class load.
private static final MethodHandle VACUUM;

static {
try {
VACUUM = lookup().unreflect(VacuumProcedure.class.getMethod("vacuum", ConnectorSession.class, ConnectorAccessControl.class, String.class, String.class, String.class));
}
catch (ReflectiveOperationException e) {
// vacuum is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final CatalogName catalogName;
private final HdfsEnvironment hdfsEnvironment;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@
import java.util.Optional;

import static com.google.common.base.Preconditions.checkState;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;

Expand All @@ -56,14 +56,17 @@ public class FlushHiveMetastoreCacheProcedure
PARAM_PARTITION_COLUMN.toLowerCase(ENGLISH),
PARAM_PARTITION_VALUE.toLowerCase(ENGLISH));

private static final MethodHandle FLUSH_HIVE_METASTORE_CACHE = methodHandle(
FlushHiveMetastoreCacheProcedure.class,
"flushMetadataCache",
String.class,
String.class,
String.class,
List.class,
List.class);
// Handle to the public flushMetadataCache(...) method, resolved once at class load.
private static final MethodHandle FLUSH_HIVE_METASTORE_CACHE;

static {
try {
FLUSH_HIVE_METASTORE_CACHE = lookup().unreflect(FlushHiveMetastoreCacheProcedure.class.getMethod("flushMetadataCache", String.class, String.class, String.class, List.class, List.class));
}
catch (ReflectiveOperationException e) {
// flushMetadataCache is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private static final String FAKE_PARAM_DEFAULT_VALUE = "procedure should only be invoked with named parameters";

private final Optional<CachingHiveMetastore> cachingHiveMetastore;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,22 @@
import java.io.IOException;
import java.lang.invoke.MethodHandle;

import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;

public class WriteHiveMetastoreRecordingProcedure
implements Provider<Procedure>
{
private static final MethodHandle WRITE_HIVE_METASTORE_RECORDING = methodHandle(
WriteHiveMetastoreRecordingProcedure.class,
"writeHiveMetastoreRecording");
// Handle to the public no-arg writeHiveMetastoreRecording() method, resolved once at class load.
private static final MethodHandle WRITE_HIVE_METASTORE_RECORDING;

static {
try {
WRITE_HIVE_METASTORE_RECORDING = lookup().unreflect(WriteHiveMetastoreRecordingProcedure.class.getMethod("writeHiveMetastoreRecording"));
}
catch (ReflectiveOperationException e) {
// The method is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final RateLimiter rateLimiter = RateLimiter.create(0.2);
private final HiveMetastoreRecording hiveMetastoreRecording;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,24 +48,25 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.spi.StandardErrorCode.ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.connector.RetryMode.NO_RETRIES;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;

public class CreateEmptyPartitionProcedure
implements Provider<Procedure>
{
private static final MethodHandle CREATE_EMPTY_PARTITION = methodHandle(
CreateEmptyPartitionProcedure.class,
"createEmptyPartition",
ConnectorSession.class,
ConnectorAccessControl.class,
String.class,
String.class,
List.class,
List.class);
// Handle to the public createEmptyPartition(...) method, resolved once at class load.
private static final MethodHandle CREATE_EMPTY_PARTITION;

static {
try {
CREATE_EMPTY_PARTITION = lookup().unreflect(CreateEmptyPartitionProcedure.class.getMethod("createEmptyPartition", ConnectorSession.class, ConnectorAccessControl.class, String.class, String.class, List.class, List.class));
}
catch (ReflectiveOperationException e) {
// createEmptyPartition is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final TransactionalMetadataFactory hiveMetadataFactory;
private final LocationService locationService;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.plugin.hive.acid.AcidTransaction.NO_ACID_TRANSACTION;
import static io.trino.spi.StandardErrorCode.INVALID_PROCEDURE_ARGUMENT;
import static io.trino.spi.block.MethodHandleUtil.methodHandle;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
import static java.lang.invoke.MethodHandles.lookup;
import static java.util.Objects.requireNonNull;
import static org.apache.hadoop.hive.metastore.utils.FileUtils.makePartName;

Expand All @@ -55,14 +55,16 @@
public class DropStatsProcedure
implements Provider<Procedure>
{
private static final MethodHandle DROP_STATS = methodHandle(
DropStatsProcedure.class,
"dropStats",
ConnectorSession.class,
ConnectorAccessControl.class,
String.class,
String.class,
List.class);
// Handle to the public dropStats(...) method, resolved once at class load.
private static final MethodHandle DROP_STATS;

static {
try {
DROP_STATS = lookup().unreflect(DropStatsProcedure.class.getMethod("dropStats", ConnectorSession.class, ConnectorAccessControl.class, String.class, String.class, List.class));
}
catch (ReflectiveOperationException e) {
// dropStats is declared on this class, so failing to resolve it is a programming error
throw new AssertionError(e);
}
}

private final TransactionalMetadataFactory hiveMetadataFactory;

Expand Down
Loading