Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,7 @@ public AccessControl createAccessControl(AccessControlManager accessControlManag

AccessControl loggingInvocationsAccessControl = newProxy(
AccessControl.class,
new LoggingInvocationHandler(
accessControlManager,
new LoggingInvocationHandler.ReflectiveParameterNamesProvider(),
logger::debug));
new LoggingInvocationHandler(accessControlManager, logger::debug));

return ForwardingAccessControl.of(() -> {
if (logger.isDebugEnabled()) {
Expand Down
5 changes: 0 additions & 5 deletions lib/trino-plugin-toolkit/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,6 @@
<artifactId>log</artifactId>
</dependency>

<dependency>
<groupId>io.airlift</groupId>
<artifactId>parameternames</artifactId>
</dependency>

<dependency>
<groupId>io.airlift</groupId>
<artifactId>security</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,14 @@
*/
package io.trino.plugin.base.util;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.reflect.AbstractInvocationHandler;
import io.airlift.log.Logger;
import io.airlift.parameternames.ParameterNames;
import io.airlift.units.Duration;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.IntStream;
Expand All @@ -40,19 +35,17 @@ public class LoggingInvocationHandler
extends AbstractInvocationHandler
{
private final Object delegate;
private final ParameterNamesProvider parameterNames;
private final Consumer<String> logger;
private final boolean includeResult;

public LoggingInvocationHandler(Object delegate, ParameterNamesProvider parameterNames, Consumer<String> logger)
public LoggingInvocationHandler(Object delegate, Consumer<String> logger)
{
this(delegate, parameterNames, logger, false);
this(delegate, logger, false);
}

public LoggingInvocationHandler(Object delegate, ParameterNamesProvider parameterNames, Consumer<String> logger, boolean includeResult)
public LoggingInvocationHandler(Object delegate, Consumer<String> logger, boolean includeResult)
{
this.delegate = requireNonNull(delegate, "delegate is null");
this.parameterNames = requireNonNull(parameterNames, "parameterNames is null");
this.logger = requireNonNull(logger, "logger is null");
this.includeResult = includeResult;
}
Expand Down Expand Up @@ -82,9 +75,9 @@ protected Object handleInvocation(Object proxy, Method method, Object[] args)
return result;
}

private String invocationDescription(Method method, Object[] args)
private static String invocationDescription(Method method, Object[] args)
{
Optional<List<String>> parameterNames = this.parameterNames.getParameterNames(method);
Optional<List<String>> parameterNames = getParameterNames(method);
return "Invocation of " + method.getName() +
IntStream.range(0, args.length)
.mapToObj(i -> {
Expand All @@ -104,70 +97,14 @@ private static String formatArgument(Object arg)
return String.valueOf(arg);
}

public interface ParameterNamesProvider
private static Optional<List<String>> getParameterNames(Method method)
{
Optional<List<String>> getParameterNames(Method method);
}

public static class ReflectiveParameterNamesProvider
implements ParameterNamesProvider
{
@Override
public Optional<List<String>> getParameterNames(Method method)
{
Parameter[] parameters = method.getParameters();
if (Arrays.stream(parameters).noneMatch(Parameter::isNamePresent)) {
return Optional.empty();
}
return Arrays.stream(parameters)
.map(Parameter::getName)
.collect(collectingAndThen(toImmutableList(), Optional::of));
}
}

public static class AirliftParameterNamesProvider
implements ParameterNamesProvider
{
private static final Logger log = Logger.get(AirliftParameterNamesProvider.class);

private final Map<Method, List<String>> parameterNames;

public <I, C extends I> AirliftParameterNamesProvider(Class<I> interfaceClass, Class<C> implementationClass)
{
requireNonNull(interfaceClass, "interfaceClass is null");
requireNonNull(implementationClass, "implementationClass is null");

ImmutableMap.Builder<Method, List<String>> parameterNames = ImmutableMap.builder();
for (Method interfaceMethod : interfaceClass.getMethods()) {
tryGetParameterNamesForMethod(interfaceMethod, implementationClass)
.map(ImmutableList::copyOf)
.ifPresent(names -> parameterNames.put(interfaceMethod, names));
}
this.parameterNames = parameterNames.buildOrThrow();
}

private static Optional<List<String>> tryGetParameterNamesForMethod(Method interfaceMethod, Class<?> implementationClass)
{
Optional<List<String>> names = ParameterNames.tryGetParameterNames(interfaceMethod);
if (names.isPresent()) {
return names;
}

Method implementationMethod;
try {
implementationMethod = implementationClass.getMethod(interfaceMethod.getName(), interfaceMethod.getParameterTypes());
}
catch (NoSuchMethodException e) {
log.debug(e, "Could not find implementation for %s", interfaceMethod);
return Optional.empty();
}
return ParameterNames.tryGetParameterNames(implementationMethod);
}

@Override
public Optional<List<String>> getParameterNames(Method method)
{
return Optional.ofNullable(parameterNames.get(method));
Parameter[] parameters = method.getParameters();
if (Arrays.stream(parameters).noneMatch(Parameter::isNamePresent)) {
return Optional.empty();
}
return Arrays.stream(parameters)
.map(Parameter::getName)
.collect(collectingAndThen(toImmutableList(), Optional::of));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
*/
package io.trino.plugin.base.util;

import io.trino.plugin.base.util.LoggingInvocationHandler.ReflectiveParameterNamesProvider;
import org.testng.annotations.Test;

import java.lang.reflect.InvocationHandler;
Expand Down Expand Up @@ -43,7 +42,7 @@ public String run(boolean ok, String s)
}
};
List<String> messages = new ArrayList<>();
InvocationHandler handler = new LoggingInvocationHandler(delegate, new ReflectiveParameterNamesProvider(), messages::add);
InvocationHandler handler = new LoggingInvocationHandler(delegate, messages::add);
SomeInterface proxy = newProxy(SomeInterface.class, handler);

proxy.run(true, "xyz");
Expand Down Expand Up @@ -76,7 +75,7 @@ public String run(boolean ok, String s)
}
};
List<String> messages = new ArrayList<>();
InvocationHandler handler = new LoggingInvocationHandler(delegate, new ReflectiveParameterNamesProvider(), messages::add, true);
InvocationHandler handler = new LoggingInvocationHandler(delegate, messages::add, true);
SomeInterface proxy = newProxy(SomeInterface.class, handler);

proxy.run(true, "xyz");
Expand Down
10 changes: 10 additions & 0 deletions plugin/trino-accumulo/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,16 @@
<dep.log4j.version>1.2.17</dep.log4j.version>
</properties>

<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
<version>0.9.3-1</version>
</dependency>
</dependencies>
</dependencyManagement>

<dependencies>
<dependency>
<groupId>io.trino</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.jmx.MBeanServerModule;
import io.trino.plugin.base.util.LoggingInvocationHandler;
import io.trino.plugin.base.util.LoggingInvocationHandler.ReflectiveParameterNamesProvider;
import io.trino.plugin.jdbc.jmx.StatisticsAwareConnectionFactory;
import io.trino.plugin.jdbc.jmx.StatisticsAwareJdbcClient;
import org.weakref.jmx.guice.MBeanModule;
Expand Down Expand Up @@ -57,10 +56,7 @@ public JdbcClient createJdbcClientWithStats(@ForBaseJdbc JdbcClient client, Cata
{
Logger logger = Logger.get(format("io.trino.plugin.jdbc.%s.jdbcclient", catalogName));

JdbcClient loggingInvocationsJdbcClient = newProxy(JdbcClient.class, new LoggingInvocationHandler(
client,
new ReflectiveParameterNamesProvider(),
logger::debug));
JdbcClient loggingInvocationsJdbcClient = newProxy(JdbcClient.class, new LoggingInvocationHandler(client, logger::debug));

return new StatisticsAwareJdbcClient(ForwardingJdbcClient.of(() -> {
if (logger.isDebugEnabled()) {
Expand Down
5 changes: 5 additions & 0 deletions plugin/trino-hive/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,11 @@
<artifactId>hive-apache</artifactId>
</dependency>

<dependency>
<groupId>io.trino.hive</groupId>
<artifactId>hive-thrift</artifactId>
</dependency>

<dependency>
<groupId>io.airlift</groupId>
<artifactId>aircompressor</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@
*/
package io.trino.plugin.hive;

import io.trino.hive.thrift.metastore.Table;
import io.trino.spi.connector.SchemaTableName;
import org.apache.hadoop.hive.metastore.api.Table;

import java.util.Optional;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import io.trino.hive.thrift.metastore.StorageDescriptor;
import io.trino.plugin.hive.metastore.SortingColumn;
import io.trino.plugin.hive.util.HiveBucketing;
import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion;
import io.trino.spi.TrinoException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;

import java.util.List;
import java.util.Map;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
package io.trino.plugin.hive;

import com.google.common.collect.ImmutableList;
import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.AcidTransactionOwner;
Expand All @@ -31,7 +32,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
import org.apache.hadoop.hive.metastore.api.DataOperationType;

import java.util.List;
import java.util.Map;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
package io.trino.plugin.hive.acid;

import com.google.common.collect.ImmutableMap;
import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.orc.OrcWriter.OrcOperation;
import org.apache.hadoop.hive.metastore.api.DataOperationType;

import java.util.Map;
import java.util.Optional;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,16 +14,20 @@
package io.trino.plugin.hive.metastore;

import com.linkedin.coral.common.HiveMetastoreClient;
import io.trino.hive.thrift.metastore.Database;
import io.trino.hive.thrift.metastore.FieldSchema;
import io.trino.hive.thrift.metastore.SerDeInfo;
import io.trino.hive.thrift.metastore.StorageDescriptor;
import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.CoralTableRedirectionResolver;
import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil;
import io.trino.spi.connector.SchemaTableName;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Table;

import java.util.List;
import java.util.Optional;

import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiDatabase;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable;
import static java.util.Objects.requireNonNull;

/**
Expand Down Expand Up @@ -53,9 +57,11 @@ public List<String> getAllDatabases()

// returning null for missing entry is as per Coral's requirements
@Override
public Database getDatabase(String dbName)
public org.apache.hadoop.hive.metastore.api.Database getDatabase(String dbName)
{
return delegate.getDatabase(dbName).map(ThriftMetastoreUtil::toMetastoreApiDatabase).orElse(null);
return delegate.getDatabase(dbName)
.map(database -> toHiveDatabase(toMetastoreApiDatabase(database)))
.orElse(null);
}

@Override
Expand All @@ -70,12 +76,75 @@ public org.apache.hadoop.hive.metastore.api.Table getTable(String dbName, String
if (!dbName.isEmpty() && !tableName.isEmpty()) {
Optional<Table> redirected = tableRedirection.redirect(new SchemaTableName(dbName, tableName));
if (redirected.isPresent()) {
return redirected.get();
return toHiveTable(redirected.get());
}
}

return delegate.getTable(dbName, tableName)
.map(value -> ThriftMetastoreUtil.toMetastoreApiTable(value, NO_PRIVILEGES))
.map(value -> toHiveTable(toMetastoreApiTable(value, NO_PRIVILEGES)))
.orElse(null);
}

private static org.apache.hadoop.hive.metastore.api.Database toHiveDatabase(Database database)
{
var result = new org.apache.hadoop.hive.metastore.api.Database();
result.setName(database.getName());
result.setDescription(database.getDescription());
result.setLocationUri(database.getLocationUri());
result.setParameters(database.getParameters());
return result;
}

private static org.apache.hadoop.hive.metastore.api.Table toHiveTable(Table table)
{
var result = new org.apache.hadoop.hive.metastore.api.Table();
result.setDbName(table.getDbName());
result.setTableName(table.getTableName());
result.setTableType(table.getTableType());
result.setViewOriginalText(table.getViewOriginalText());
result.setViewExpandedText(table.getViewExpandedText());
result.setPartitionKeys(table.getPartitionKeys().stream()
.map(CoralSemiTransactionalHiveMSCAdapter::toHiveFieldSchema)
.toList());
result.setParameters(table.getParameters());
result.setSd(toHiveStorageDescriptor(table.getSd()));
return result;
}

private static org.apache.hadoop.hive.metastore.api.StorageDescriptor toHiveStorageDescriptor(StorageDescriptor storage)
{
var result = new org.apache.hadoop.hive.metastore.api.StorageDescriptor();
result.setCols(storage.getCols().stream()
.map(CoralSemiTransactionalHiveMSCAdapter::toHiveFieldSchema)
.toList());
result.setBucketCols(storage.getBucketCols());
result.setNumBuckets(storage.getNumBuckets());
result.setInputFormat(storage.getInputFormat());
result.setOutputFormat(storage.getInputFormat());
result.setSerdeInfo(toHiveSerdeInfo(storage.getSerdeInfo()));
result.setLocation(storage.getLocation());
result.setParameters(storage.getParameters());
return result;
}

private static org.apache.hadoop.hive.metastore.api.SerDeInfo toHiveSerdeInfo(SerDeInfo info)
{
var result = new org.apache.hadoop.hive.metastore.api.SerDeInfo();
result.setName(info.getName());
result.setDescription(info.getDescription());
result.setSerializationLib(info.getSerializationLib());
result.setSerializerClass(info.getSerializerClass());
result.setDeserializerClass(info.getDeserializerClass());
result.setParameters(info.getParameters());
return result;
}

private static org.apache.hadoop.hive.metastore.api.FieldSchema toHiveFieldSchema(FieldSchema field)
{
var result = new org.apache.hadoop.hive.metastore.api.FieldSchema();
result.setName(field.getName());
result.setType(field.getType());
result.setComment(field.getComment());
return result;
}
}
Loading