diff --git a/core/trino-main/src/main/java/io/trino/security/AccessControlModule.java b/core/trino-main/src/main/java/io/trino/security/AccessControlModule.java
index 2d94f495a269..eb97145b5af5 100644
--- a/core/trino-main/src/main/java/io/trino/security/AccessControlModule.java
+++ b/core/trino-main/src/main/java/io/trino/security/AccessControlModule.java
@@ -51,10 +51,7 @@ public AccessControl createAccessControl(AccessControlManager accessControlManag
AccessControl loggingInvocationsAccessControl = newProxy(
AccessControl.class,
- new LoggingInvocationHandler(
- accessControlManager,
- new LoggingInvocationHandler.ReflectiveParameterNamesProvider(),
- logger::debug));
+ new LoggingInvocationHandler(accessControlManager, logger::debug));
return ForwardingAccessControl.of(() -> {
if (logger.isDebugEnabled()) {
diff --git a/lib/trino-plugin-toolkit/pom.xml b/lib/trino-plugin-toolkit/pom.xml
index b5986b859803..a0172123d48c 100644
--- a/lib/trino-plugin-toolkit/pom.xml
+++ b/lib/trino-plugin-toolkit/pom.xml
@@ -48,11 +48,6 @@
log
-
- io.airlift
- parameternames
-
-
io.airlift
security
diff --git a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/util/LoggingInvocationHandler.java b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/util/LoggingInvocationHandler.java
index c86ef96bf41a..ed653262a231 100644
--- a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/util/LoggingInvocationHandler.java
+++ b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/util/LoggingInvocationHandler.java
@@ -13,11 +13,7 @@
*/
package io.trino.plugin.base.util;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
import com.google.common.reflect.AbstractInvocationHandler;
-import io.airlift.log.Logger;
-import io.airlift.parameternames.ParameterNames;
import io.airlift.units.Duration;
import java.lang.reflect.InvocationTargetException;
@@ -25,7 +21,6 @@
import java.lang.reflect.Parameter;
import java.util.Arrays;
import java.util.List;
-import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.IntStream;
@@ -40,19 +35,17 @@ public class LoggingInvocationHandler
extends AbstractInvocationHandler
{
private final Object delegate;
- private final ParameterNamesProvider parameterNames;
private final Consumer logger;
private final boolean includeResult;
- public LoggingInvocationHandler(Object delegate, ParameterNamesProvider parameterNames, Consumer logger)
+ public LoggingInvocationHandler(Object delegate, Consumer logger)
{
- this(delegate, parameterNames, logger, false);
+ this(delegate, logger, false);
}
- public LoggingInvocationHandler(Object delegate, ParameterNamesProvider parameterNames, Consumer logger, boolean includeResult)
+ public LoggingInvocationHandler(Object delegate, Consumer logger, boolean includeResult)
{
this.delegate = requireNonNull(delegate, "delegate is null");
- this.parameterNames = requireNonNull(parameterNames, "parameterNames is null");
this.logger = requireNonNull(logger, "logger is null");
this.includeResult = includeResult;
}
@@ -82,9 +75,9 @@ protected Object handleInvocation(Object proxy, Method method, Object[] args)
return result;
}
- private String invocationDescription(Method method, Object[] args)
+ private static String invocationDescription(Method method, Object[] args)
{
- Optional> parameterNames = this.parameterNames.getParameterNames(method);
+ Optional> parameterNames = getParameterNames(method);
return "Invocation of " + method.getName() +
IntStream.range(0, args.length)
.mapToObj(i -> {
@@ -104,70 +97,14 @@ private static String formatArgument(Object arg)
return String.valueOf(arg);
}
- public interface ParameterNamesProvider
+ private static Optional> getParameterNames(Method method)
{
- Optional> getParameterNames(Method method);
- }
-
- public static class ReflectiveParameterNamesProvider
- implements ParameterNamesProvider
- {
- @Override
- public Optional> getParameterNames(Method method)
- {
- Parameter[] parameters = method.getParameters();
- if (Arrays.stream(parameters).noneMatch(Parameter::isNamePresent)) {
- return Optional.empty();
- }
- return Arrays.stream(parameters)
- .map(Parameter::getName)
- .collect(collectingAndThen(toImmutableList(), Optional::of));
- }
- }
-
- public static class AirliftParameterNamesProvider
- implements ParameterNamesProvider
- {
- private static final Logger log = Logger.get(AirliftParameterNamesProvider.class);
-
- private final Map> parameterNames;
-
- public AirliftParameterNamesProvider(Class interfaceClass, Class implementationClass)
- {
- requireNonNull(interfaceClass, "interfaceClass is null");
- requireNonNull(implementationClass, "implementationClass is null");
-
- ImmutableMap.Builder> parameterNames = ImmutableMap.builder();
- for (Method interfaceMethod : interfaceClass.getMethods()) {
- tryGetParameterNamesForMethod(interfaceMethod, implementationClass)
- .map(ImmutableList::copyOf)
- .ifPresent(names -> parameterNames.put(interfaceMethod, names));
- }
- this.parameterNames = parameterNames.buildOrThrow();
- }
-
- private static Optional> tryGetParameterNamesForMethod(Method interfaceMethod, Class> implementationClass)
- {
- Optional> names = ParameterNames.tryGetParameterNames(interfaceMethod);
- if (names.isPresent()) {
- return names;
- }
-
- Method implementationMethod;
- try {
- implementationMethod = implementationClass.getMethod(interfaceMethod.getName(), interfaceMethod.getParameterTypes());
- }
- catch (NoSuchMethodException e) {
- log.debug(e, "Could not find implementation for %s", interfaceMethod);
- return Optional.empty();
- }
- return ParameterNames.tryGetParameterNames(implementationMethod);
- }
-
- @Override
- public Optional> getParameterNames(Method method)
- {
- return Optional.ofNullable(parameterNames.get(method));
+ Parameter[] parameters = method.getParameters();
+ if (Arrays.stream(parameters).noneMatch(Parameter::isNamePresent)) {
+ return Optional.empty();
}
+ return Arrays.stream(parameters)
+ .map(Parameter::getName)
+ .collect(collectingAndThen(toImmutableList(), Optional::of));
}
}
diff --git a/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/util/TestLoggingInvocationHandler.java b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/util/TestLoggingInvocationHandler.java
index f814b98da69b..52cc3e5ddb6a 100644
--- a/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/util/TestLoggingInvocationHandler.java
+++ b/lib/trino-plugin-toolkit/src/test/java/io/trino/plugin/base/util/TestLoggingInvocationHandler.java
@@ -13,7 +13,6 @@
*/
package io.trino.plugin.base.util;
-import io.trino.plugin.base.util.LoggingInvocationHandler.ReflectiveParameterNamesProvider;
import org.testng.annotations.Test;
import java.lang.reflect.InvocationHandler;
@@ -43,7 +42,7 @@ public String run(boolean ok, String s)
}
};
List messages = new ArrayList<>();
- InvocationHandler handler = new LoggingInvocationHandler(delegate, new ReflectiveParameterNamesProvider(), messages::add);
+ InvocationHandler handler = new LoggingInvocationHandler(delegate, messages::add);
SomeInterface proxy = newProxy(SomeInterface.class, handler);
proxy.run(true, "xyz");
@@ -76,7 +75,7 @@ public String run(boolean ok, String s)
}
};
List messages = new ArrayList<>();
- InvocationHandler handler = new LoggingInvocationHandler(delegate, new ReflectiveParameterNamesProvider(), messages::add, true);
+ InvocationHandler handler = new LoggingInvocationHandler(delegate, messages::add, true);
SomeInterface proxy = newProxy(SomeInterface.class, handler);
proxy.run(true, "xyz");
diff --git a/plugin/trino-accumulo/pom.xml b/plugin/trino-accumulo/pom.xml
index 6f699b857d34..2c9f0ffb31ca 100644
--- a/plugin/trino-accumulo/pom.xml
+++ b/plugin/trino-accumulo/pom.xml
@@ -19,6 +19,16 @@
1.2.17
+
+
+
+ org.apache.thrift
+ libthrift
+ 0.9.3-1
+
+
+
+
io.trino
diff --git a/plugin/trino-base-jdbc/src/main/java/io/trino/plugin/jdbc/JdbcDiagnosticModule.java b/plugin/trino-base-jdbc/src/main/java/io/trino/plugin/jdbc/JdbcDiagnosticModule.java
index 2418ebfb44d6..557095b85053 100644
--- a/plugin/trino-base-jdbc/src/main/java/io/trino/plugin/jdbc/JdbcDiagnosticModule.java
+++ b/plugin/trino-base-jdbc/src/main/java/io/trino/plugin/jdbc/JdbcDiagnosticModule.java
@@ -23,7 +23,6 @@
import io.trino.plugin.base.CatalogName;
import io.trino.plugin.base.jmx.MBeanServerModule;
import io.trino.plugin.base.util.LoggingInvocationHandler;
-import io.trino.plugin.base.util.LoggingInvocationHandler.ReflectiveParameterNamesProvider;
import io.trino.plugin.jdbc.jmx.StatisticsAwareConnectionFactory;
import io.trino.plugin.jdbc.jmx.StatisticsAwareJdbcClient;
import org.weakref.jmx.guice.MBeanModule;
@@ -57,10 +56,7 @@ public JdbcClient createJdbcClientWithStats(@ForBaseJdbc JdbcClient client, Cata
{
Logger logger = Logger.get(format("io.trino.plugin.jdbc.%s.jdbcclient", catalogName));
- JdbcClient loggingInvocationsJdbcClient = newProxy(JdbcClient.class, new LoggingInvocationHandler(
- client,
- new ReflectiveParameterNamesProvider(),
- logger::debug));
+ JdbcClient loggingInvocationsJdbcClient = newProxy(JdbcClient.class, new LoggingInvocationHandler(client, logger::debug));
return new StatisticsAwareJdbcClient(ForwardingJdbcClient.of(() -> {
if (logger.isDebugEnabled()) {
diff --git a/plugin/trino-hive/pom.xml b/plugin/trino-hive/pom.xml
index 09465964bf0e..0912ff40b426 100644
--- a/plugin/trino-hive/pom.xml
+++ b/plugin/trino-hive/pom.xml
@@ -82,6 +82,11 @@
hive-apache
+
+ io.trino.hive
+ hive-thrift
+
+
io.airlift
aircompressor
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/CoralTableRedirectionResolver.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/CoralTableRedirectionResolver.java
index b838c96218dc..b52328857c71 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/CoralTableRedirectionResolver.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/CoralTableRedirectionResolver.java
@@ -13,8 +13,8 @@
*/
package io.trino.plugin.hive;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.spi.connector.SchemaTableName;
-import org.apache.hadoop.hive.metastore.api.Table;
import java.util.Optional;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveBucketProperty.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveBucketProperty.java
index 15ce08b9a0a6..9e197dea404c 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveBucketProperty.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveBucketProperty.java
@@ -16,11 +16,11 @@
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
+import io.trino.hive.thrift.metastore.StorageDescriptor;
import io.trino.plugin.hive.metastore.SortingColumn;
import io.trino.plugin.hive.util.HiveBucketing;
import io.trino.plugin.hive.util.HiveBucketing.BucketingVersion;
import io.trino.spi.TrinoException;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import java.util.List;
import java.util.Map;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetastoreClosure.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetastoreClosure.java
index 7b3b8ce7baed..e2a223f7e7a1 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetastoreClosure.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetastoreClosure.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive;
import com.google.common.collect.ImmutableList;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.plugin.hive.acid.AcidTransaction;
import io.trino.plugin.hive.metastore.AcidTransactionOwner;
@@ -31,7 +32,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import java.util.List;
import java.util.Map;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/acid/AcidOperation.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/acid/AcidOperation.java
index 82c36af137df..e4a0f4b02c6d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/acid/AcidOperation.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/acid/AcidOperation.java
@@ -14,8 +14,8 @@
package io.trino.plugin.hive.acid;
import com.google.common.collect.ImmutableMap;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.orc.OrcWriter.OrcOperation;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import java.util.Map;
import java.util.Optional;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/CoralSemiTransactionalHiveMSCAdapter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/CoralSemiTransactionalHiveMSCAdapter.java
index af69a1d964e7..1a676436978e 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/CoralSemiTransactionalHiveMSCAdapter.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/CoralSemiTransactionalHiveMSCAdapter.java
@@ -14,16 +14,20 @@
package io.trino.plugin.hive.metastore;
import com.linkedin.coral.common.HiveMetastoreClient;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.SerDeInfo;
+import io.trino.hive.thrift.metastore.StorageDescriptor;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.CoralTableRedirectionResolver;
-import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil;
import io.trino.spi.connector.SchemaTableName;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.Table;
import java.util.List;
import java.util.Optional;
import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
+import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiDatabase;
+import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreApiTable;
import static java.util.Objects.requireNonNull;
/**
@@ -53,9 +57,11 @@ public List getAllDatabases()
// returning null for missing entry is as per Coral's requirements
@Override
- public Database getDatabase(String dbName)
+ public org.apache.hadoop.hive.metastore.api.Database getDatabase(String dbName)
{
- return delegate.getDatabase(dbName).map(ThriftMetastoreUtil::toMetastoreApiDatabase).orElse(null);
+ return delegate.getDatabase(dbName)
+ .map(database -> toHiveDatabase(toMetastoreApiDatabase(database)))
+ .orElse(null);
}
@Override
@@ -70,12 +76,75 @@ public org.apache.hadoop.hive.metastore.api.Table getTable(String dbName, String
if (!dbName.isEmpty() && !tableName.isEmpty()) {
Optional redirected = tableRedirection.redirect(new SchemaTableName(dbName, tableName));
if (redirected.isPresent()) {
- return redirected.get();
+ return toHiveTable(redirected.get());
}
}
return delegate.getTable(dbName, tableName)
- .map(value -> ThriftMetastoreUtil.toMetastoreApiTable(value, NO_PRIVILEGES))
+ .map(value -> toHiveTable(toMetastoreApiTable(value, NO_PRIVILEGES)))
.orElse(null);
}
+
+ private static org.apache.hadoop.hive.metastore.api.Database toHiveDatabase(Database database)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.Database();
+ result.setName(database.getName());
+ result.setDescription(database.getDescription());
+ result.setLocationUri(database.getLocationUri());
+ result.setParameters(database.getParameters());
+ return result;
+ }
+
+ private static org.apache.hadoop.hive.metastore.api.Table toHiveTable(Table table)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.Table();
+ result.setDbName(table.getDbName());
+ result.setTableName(table.getTableName());
+ result.setTableType(table.getTableType());
+ result.setViewOriginalText(table.getViewOriginalText());
+ result.setViewExpandedText(table.getViewExpandedText());
+ result.setPartitionKeys(table.getPartitionKeys().stream()
+ .map(CoralSemiTransactionalHiveMSCAdapter::toHiveFieldSchema)
+ .toList());
+ result.setParameters(table.getParameters());
+ result.setSd(toHiveStorageDescriptor(table.getSd()));
+ return result;
+ }
+
+ private static org.apache.hadoop.hive.metastore.api.StorageDescriptor toHiveStorageDescriptor(StorageDescriptor storage)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.StorageDescriptor();
+ result.setCols(storage.getCols().stream()
+ .map(CoralSemiTransactionalHiveMSCAdapter::toHiveFieldSchema)
+ .toList());
+ result.setBucketCols(storage.getBucketCols());
+ result.setNumBuckets(storage.getNumBuckets());
+ result.setInputFormat(storage.getInputFormat());
+ result.setOutputFormat(storage.getOutputFormat());
+ result.setSerdeInfo(toHiveSerdeInfo(storage.getSerdeInfo()));
+ result.setLocation(storage.getLocation());
+ result.setParameters(storage.getParameters());
+ return result;
+ }
+
+ private static org.apache.hadoop.hive.metastore.api.SerDeInfo toHiveSerdeInfo(SerDeInfo info)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.SerDeInfo();
+ result.setName(info.getName());
+ result.setDescription(info.getDescription());
+ result.setSerializationLib(info.getSerializationLib());
+ result.setSerializerClass(info.getSerializerClass());
+ result.setDeserializerClass(info.getDeserializerClass());
+ result.setParameters(info.getParameters());
+ return result;
+ }
+
+ private static org.apache.hadoop.hive.metastore.api.FieldSchema toHiveFieldSchema(FieldSchema field)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.FieldSchema();
+ result.setName(field.getName());
+ result.setType(field.getType());
+ result.setComment(field.getComment());
+ return result;
+ }
}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/ForwardingHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/ForwardingHiveMetastore.java
index 0ec4a16282c6..468aeb5f3cec 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/ForwardingHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/ForwardingHiveMetastore.java
@@ -13,6 +13,7 @@
*/
package io.trino.plugin.hive.metastore;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.HiveType;
@@ -24,7 +25,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import java.util.List;
import java.util.Map;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastore.java
index 019cbce7f8d7..3eec2fa212eb 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastore.java
@@ -14,6 +14,7 @@
package io.trino.plugin.hive.metastore;
import com.google.common.collect.ImmutableMap;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.HiveType;
@@ -26,7 +27,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import java.util.List;
import java.util.Map;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
index d991573beeb5..da195235bb36 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/MetastoreUtil.java
@@ -58,6 +58,16 @@
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.collect.ImmutableList.toImmutableList;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.BUCKET_COUNT;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.BUCKET_FIELD_NAME;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.FILE_INPUT_FORMAT;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.FILE_OUTPUT_FORMAT;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_COLUMNS;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_COLUMN_TYPES;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_LOCATION;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_NAME;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES;
import static io.trino.plugin.hive.HiveMetadata.AVRO_SCHEMA_LITERAL_KEY;
import static io.trino.plugin.hive.HiveMetadata.AVRO_SCHEMA_URL_KEY;
import static io.trino.plugin.hive.HiveSplitManager.PRESTO_OFFLINE;
@@ -75,16 +85,6 @@
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hive.metastore.ColumnType.typeToThriftType;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.BUCKET_COUNT;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.BUCKET_FIELD_NAME;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_OUTPUT_FORMAT;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_LOCATION;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES;
public final class MetastoreUtil
{
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
index aecf99b59761..6ba978d8f934 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SemiTransactionalHiveMetastore.java
@@ -25,6 +25,7 @@
import io.airlift.units.Duration;
import io.trino.hdfs.HdfsContext;
import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HiveMetastoreClosure;
@@ -58,7 +59,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import javax.annotation.concurrent.GuardedBy;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SortingColumn.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SortingColumn.java
index 67bffae1be4b..c5e313e333af 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SortingColumn.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/SortingColumn.java
@@ -90,7 +90,7 @@ public Order getOrder()
return order;
}
- public static SortingColumn fromMetastoreApiOrder(org.apache.hadoop.hive.metastore.api.Order order, String tablePartitionName)
+ public static SortingColumn fromMetastoreApiOrder(io.trino.hive.thrift.metastore.Order order, String tablePartitionName)
{
return new SortingColumn(order.getCol(), Order.fromMetastoreApiOrder(order.getOrder(), tablePartitionName));
}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/cache/CachingHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/cache/CachingHiveMetastore.java
index 32e17823dad4..180e3f6f0cce 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/cache/CachingHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/cache/CachingHiveMetastore.java
@@ -25,6 +25,7 @@
import io.airlift.jmx.CacheStatsMBean;
import io.airlift.units.Duration;
import io.trino.collect.cache.EvictableCacheBuilder;
+import io.trino.hive.thrift.metastore.DataOperationType;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.HiveType;
@@ -53,7 +54,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.weakref.jmx.Managed;
import org.weakref.jmx.Nested;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueStatConverter.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueStatConverter.java
index 1d32f221ac18..5239395db5e2 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueStatConverter.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/glue/converter/GlueStatConverter.java
@@ -24,13 +24,13 @@
import com.amazonaws.services.glue.model.DoubleColumnStatisticsData;
import com.amazonaws.services.glue.model.LongColumnStatisticsData;
import com.amazonaws.services.glue.model.StringColumnStatisticsData;
+import io.trino.hive.thrift.metastore.Decimal;
import io.trino.plugin.hive.HiveType;
import io.trino.plugin.hive.metastore.Column;
import io.trino.plugin.hive.metastore.HiveColumnStatistics;
import io.trino.plugin.hive.metastore.Partition;
import io.trino.plugin.hive.metastore.Table;
import io.trino.spi.TrinoException;
-import org.apache.hadoop.hive.metastore.api.Decimal;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/BridgingHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/BridgingHiveMetastore.java
index 4a9402b894b4..99ca6d5e2227 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/BridgingHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/BridgingHiveMetastore.java
@@ -14,6 +14,8 @@
package io.trino.plugin.hive.metastore.thrift;
import com.google.common.collect.ImmutableMap;
+import io.trino.hive.thrift.metastore.DataOperationType;
+import io.trino.hive.thrift.metastore.FieldSchema;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.HiveType;
@@ -38,8 +40,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
import java.util.List;
import java.util.Map;
@@ -132,7 +132,7 @@ public void updateTableStatistics(String databaseName, String tableName, AcidTra
@Override
public void updatePartitionStatistics(Table table, Map> updates)
{
- org.apache.hadoop.hive.metastore.api.Table metastoreTable = toMetastoreApiTable(table);
+ io.trino.hive.thrift.metastore.Table metastoreTable = toMetastoreApiTable(table);
updates.forEach((partitionName, update) -> delegate.updatePartitionStatistics(metastoreTable, partitionName, update));
}
@@ -169,7 +169,7 @@ public void dropDatabase(String databaseName, boolean deleteData)
@Override
public void renameDatabase(String databaseName, String newDatabaseName)
{
- org.apache.hadoop.hive.metastore.api.Database database = delegate.getDatabase(databaseName)
+ io.trino.hive.thrift.metastore.Database database = delegate.getDatabase(databaseName)
.orElseThrow(() -> new SchemaNotFoundException(databaseName));
database.setName(newDatabaseName);
delegate.alterDatabase(databaseName, database);
@@ -216,7 +216,7 @@ public void replaceTable(String databaseName, String tableName, Table newTable,
@Override
public void renameTable(String databaseName, String tableName, String newDatabaseName, String newTableName)
{
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
table.setDbName(newDatabaseName);
table.setTableName(newTableName);
@@ -226,7 +226,7 @@ public void renameTable(String databaseName, String tableName, String newDatabas
@Override
public void commentTable(String databaseName, String tableName, Optional comment)
{
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
Map parameters = table.getParameters().entrySet().stream()
@@ -259,7 +259,7 @@ public void setTableOwner(String databaseName, String tableName, HivePrincipal p
@Override
public void commentColumn(String databaseName, String tableName, String columnName, Optional comment)
{
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
for (FieldSchema fieldSchema : table.getSd().getCols()) {
@@ -279,7 +279,7 @@ public void commentColumn(String databaseName, String tableName, String columnNa
@Override
public void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
{
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
table.getSd().getCols().add(
new FieldSchema(columnName, columnType.getHiveTypeName().toString(), columnComment));
@@ -289,7 +289,7 @@ public void addColumn(String databaseName, String tableName, String columnName,
@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName)
{
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
for (FieldSchema fieldSchema : table.getPartitionKeys()) {
if (fieldSchema.getName().equals(oldColumnName)) {
@@ -308,13 +308,13 @@ public void renameColumn(String databaseName, String tableName, String oldColumn
public void dropColumn(String databaseName, String tableName, String columnName)
{
verifyCanDropColumn(this, databaseName, tableName, columnName);
- org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
+ io.trino.hive.thrift.metastore.Table table = delegate.getTable(databaseName, tableName)
.orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
table.getSd().getCols().removeIf(fieldSchema -> fieldSchema.getName().equals(columnName));
alterTable(databaseName, tableName, table);
}
- private void alterTable(String databaseName, String tableName, org.apache.hadoop.hive.metastore.api.Table table)
+ private void alterTable(String databaseName, String tableName, io.trino.hive.thrift.metastore.Table table)
{
delegate.alterTable(databaseName, tableName, table);
}
@@ -356,7 +356,7 @@ public Map> getPartitionsByNames(Table table, List schema = table.getDataColumns().stream()
@@ -527,7 +527,7 @@ public void updateTableWriteId(String dbName, String tableName, long transaction
@Override
public void alterPartitions(String dbName, String tableName, List<Partition> partitions, long writeId)
{
- List<org.apache.hadoop.hive.metastore.api.Partition> hadoopPartitions = partitions.stream()
+ List<io.trino.hive.thrift.metastore.Partition> hadoopPartitions = partitions.stream()
.map(ThriftMetastoreUtil::toMetastoreApiPartition)
.peek(partition -> partition.setWriteId(writeId))
.collect(toImmutableList());
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/FailureAwareThriftMetastoreClient.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/FailureAwareThriftMetastoreClient.java
index e7b7a0dada1e..613c6423654d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/FailureAwareThriftMetastoreClient.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/FailureAwareThriftMetastoreClient.java
@@ -14,22 +14,22 @@
package io.trino.plugin.hive.metastore.thrift;
import com.google.common.annotations.VisibleForTesting;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.EnvironmentContext;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.HiveObjectPrivilege;
+import io.trino.hive.thrift.metastore.HiveObjectRef;
+import io.trino.hive.thrift.metastore.LockRequest;
+import io.trino.hive.thrift.metastore.LockResponse;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.PrivilegeBag;
+import io.trino.hive.thrift.metastore.Role;
+import io.trino.hive.thrift.metastore.RolePrincipalGrant;
+import io.trino.hive.thrift.metastore.Table;
+import io.trino.hive.thrift.metastore.TxnToWriteId;
import io.trino.plugin.hive.acid.AcidOperation;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.Role;
-import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
import org.apache.thrift.TException;
import java.util.List;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
index 2db61a890592..c35b425f9c1d 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/KerberosHiveMetastoreAuthentication.java
@@ -17,11 +17,11 @@
import io.trino.hdfs.authentication.HadoopAuthentication;
import io.trino.plugin.hive.ForHiveMetastore;
import org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier;
-import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.token.Token;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
import javax.inject.Inject;
import javax.security.auth.callback.Callback;
@@ -98,11 +98,14 @@ public TTransport authenticate(TTransport rawTransport, String hiveMetastoreHost
rawTransport);
}
- return new TUGIAssumingTransport(saslTransport, authentication.getUserGroupInformation());
+ return new TUgiAssumingTransport(saslTransport, authentication.getUserGroupInformation());
}
catch (IOException e) {
throw new UncheckedIOException(e);
}
+ catch (TTransportException e) {
+ throw new RuntimeException(e);
+ }
}
private static Token<DelegationTokenIdentifier> decodeDelegationToken(String tokenValue)
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TFilterTransport.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TFilterTransport.java
new file mode 100644
index 000000000000..6d7c60c7f792
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TFilterTransport.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.thrift;
+
+import org.apache.thrift.TConfiguration;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+import java.nio.ByteBuffer;
+
+import static java.util.Objects.requireNonNull;
+
+public abstract class TFilterTransport
+ extends TTransport
+{
+ protected final TTransport transport;
+
+ protected TFilterTransport(TTransport transport)
+ {
+ this.transport = requireNonNull(transport, "transport is null");
+ }
+
+ @Override
+ public boolean isOpen()
+ {
+ return transport.isOpen();
+ }
+
+ @Override
+ public boolean peek()
+ {
+ return transport.peek();
+ }
+
+ @Override
+ public void open()
+ throws TTransportException
+ {
+ transport.open();
+ }
+
+ @Override
+ public void close()
+ {
+ transport.close();
+ }
+
+ @Override
+ public int read(ByteBuffer dst)
+ throws TTransportException
+ {
+ return transport.read(dst);
+ }
+
+ @Override
+ public int read(byte[] buf, int off, int len)
+ throws TTransportException
+ {
+ return transport.read(buf, off, len);
+ }
+
+ @Override
+ public int readAll(byte[] buf, int off, int len)
+ throws TTransportException
+ {
+ return transport.readAll(buf, off, len);
+ }
+
+ @Override
+ public void write(byte[] buf)
+ throws TTransportException
+ {
+ transport.write(buf);
+ }
+
+ @Override
+ public void write(byte[] buf, int off, int len)
+ throws TTransportException
+ {
+ transport.write(buf, off, len);
+ }
+
+ @Override
+ public int write(ByteBuffer src)
+ throws TTransportException
+ {
+ return transport.write(src);
+ }
+
+ @Override
+ public void flush()
+ throws TTransportException
+ {
+ transport.flush();
+ }
+
+ @Override
+ public byte[] getBuffer()
+ {
+ return transport.getBuffer();
+ }
+
+ @Override
+ public int getBufferPosition()
+ {
+ return transport.getBufferPosition();
+ }
+
+ @Override
+ public int getBytesRemainingInBuffer()
+ {
+ return transport.getBytesRemainingInBuffer();
+ }
+
+ @Override
+ public void consumeBuffer(int len)
+ {
+ transport.consumeBuffer(len);
+ }
+
+ @Override
+ public TConfiguration getConfiguration()
+ {
+ return transport.getConfiguration();
+ }
+
+ @Override
+ public void updateKnownMessageSize(long size)
+ throws TTransportException
+ {
+ transport.updateKnownMessageSize(size);
+ }
+
+ @Override
+ public void checkReadBytesAvailable(long numBytes)
+ throws TTransportException
+ {
+ transport.checkReadBytesAvailable(numBytes);
+ }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TUgiAssumingTransport.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TUgiAssumingTransport.java
new file mode 100644
index 000000000000..4e610df875cd
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TUgiAssumingTransport.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.thrift;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+import static io.trino.hdfs.authentication.UserGroupInformationUtils.executeActionInDoAs;
+import static java.util.Objects.requireNonNull;
+
+// based on org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport
+public class TUgiAssumingTransport
+ extends TFilterTransport
+{
+ private final UserGroupInformation ugi;
+
+ public TUgiAssumingTransport(TTransport transport, UserGroupInformation ugi)
+ {
+ super(transport);
+ this.ugi = requireNonNull(ugi, "ugi is null");
+ }
+
+ @Override
+ public void open()
+ throws TTransportException
+ {
+ executeActionInDoAs(ugi, () -> {
+ transport.open();
+ return null;
+ });
+ }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
index 56fc8a56ac91..5bdafd082fb4 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastore.java
@@ -22,6 +22,37 @@
import io.airlift.units.Duration;
import io.trino.hdfs.HdfsContext;
import io.trino.hdfs.HdfsEnvironment;
+import io.trino.hive.thrift.metastore.AlreadyExistsException;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.ConfigValSecurityException;
+import io.trino.hive.thrift.metastore.DataOperationType;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.EnvironmentContext;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.HiveObjectPrivilege;
+import io.trino.hive.thrift.metastore.HiveObjectRef;
+import io.trino.hive.thrift.metastore.InvalidInputException;
+import io.trino.hive.thrift.metastore.InvalidObjectException;
+import io.trino.hive.thrift.metastore.InvalidOperationException;
+import io.trino.hive.thrift.metastore.LockComponent;
+import io.trino.hive.thrift.metastore.LockLevel;
+import io.trino.hive.thrift.metastore.LockRequest;
+import io.trino.hive.thrift.metastore.LockResponse;
+import io.trino.hive.thrift.metastore.LockState;
+import io.trino.hive.thrift.metastore.LockType;
+import io.trino.hive.thrift.metastore.MetaException;
+import io.trino.hive.thrift.metastore.NoSuchLockException;
+import io.trino.hive.thrift.metastore.NoSuchObjectException;
+import io.trino.hive.thrift.metastore.NoSuchTxnException;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.PrivilegeBag;
+import io.trino.hive.thrift.metastore.PrivilegeGrantInfo;
+import io.trino.hive.thrift.metastore.Table;
+import io.trino.hive.thrift.metastore.TxnAbortedException;
+import io.trino.hive.thrift.metastore.TxnToWriteId;
+import io.trino.hive.thrift.metastore.UnknownDBException;
+import io.trino.hive.thrift.metastore.UnknownTableException;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
@@ -49,44 +80,13 @@
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.LockComponentBuilder;
-import org.apache.hadoop.hive.metastore.LockRequestBuilder;
-import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.InvalidInputException;
-import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
-import org.apache.hadoop.hive.metastore.api.LockComponent;
-import org.apache.hadoop.hive.metastore.api.LockLevel;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.LockState;
-import org.apache.hadoop.hive.metastore.api.LockType;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
-import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
-import org.apache.hadoop.hive.metastore.api.UnknownDBException;
-import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.thrift.TException;
import javax.annotation.concurrent.ThreadSafe;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -110,6 +110,7 @@
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Sets.difference;
+import static io.trino.hive.thrift.metastore.HiveObjectType.TABLE;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_METASTORE_ERROR;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_TABLE_LOCK_NOT_ACQUIRED;
import static io.trino.plugin.hive.TableType.MANAGED_TABLE;
@@ -134,7 +135,6 @@
import static java.lang.String.format;
import static java.lang.System.nanoTime;
import static java.util.Objects.requireNonNull;
-import static org.apache.hadoop.hive.metastore.api.HiveObjectType.TABLE;
@ThreadSafe
public class ThriftHiveMetastore
@@ -1603,19 +1603,21 @@ private void acquireSharedLock(
return;
}
- LockRequestBuilder request = new LockRequestBuilder(queryId)
- .setTransactionId(transactionId)
- .setUser(transactionOwner.toString());
+ LockRequest request = new LockRequest()
+ .setHostname(getLocalHostName())
+ .setAgentInfo(queryId)
+ .setUser(transactionOwner.toString())
+ .setTxnid(transactionId);
for (SchemaTableName table : fullTables) {
- request.addLockComponent(createLockComponentForOperation(table, operation, isDynamicPartitionWrite, Optional.empty()));
+ request.addToComponent(createLockComponentForOperation(table, operation, isDynamicPartitionWrite, Optional.empty()));
}
for (HivePartition partition : partitions) {
- request.addLockComponent(createLockComponentForOperation(partition.getTableName(), operation, isDynamicPartitionWrite, Optional.of(partition.getPartitionId())));
+ request.addToComponent(createLockComponentForOperation(partition.getTableName(), operation, isDynamicPartitionWrite, Optional.of(partition.getPartitionId())));
}
- acquireLock(format("hive transaction %s for query %s", transactionId, queryId), request.build());
+ acquireLock(format("hive transaction %s for query %s", transactionId, queryId), request);
}
@Override
@@ -1626,13 +1628,19 @@ public long acquireTableExclusiveLock(
String tableName)
{
requireNonNull(transactionOwner, "transactionOwner is null");
- LockComponent lockComponent = new LockComponent(LockType.EXCLUSIVE, LockLevel.TABLE, dbName);
- lockComponent.setTablename(tableName);
- LockRequest lockRequest = new LockRequestBuilder(queryId)
- .addLockComponent(lockComponent)
- .setUser(transactionOwner.toString())
- .build();
- return acquireLock(format("query %s", queryId), lockRequest);
+
+ LockRequest request = new LockRequest()
+ .setHostname(getLocalHostName())
+ .setAgentInfo(queryId)
+ .setUser(transactionOwner.toString());
+
+ request.addToComponent(new LockComponent()
+ .setType(LockType.EXCLUSIVE)
+ .setLevel(LockLevel.TABLE)
+ .setDbname(dbName)
+ .setTablename(tableName));
+
+ return acquireLock(format("query %s", queryId), request);
}
private long acquireLock(String context, LockRequest lockRequest)
@@ -1716,18 +1724,33 @@ private static LockComponent createLockComponentForOperation(SchemaTableName tab
requireNonNull(table, "table is null");
requireNonNull(partitionName, "partitionName is null");
- LockComponentBuilder builder = new LockComponentBuilder();
- builder.setShared();
- builder.setOperationType(operation);
+ LockComponent component = new LockComponent()
+ .setType(LockType.SHARED_READ)
+ .setOperationType(operation)
+ .setDbname(table.getSchemaName())
+ .setTablename(table.getTableName())
+ // acquire locks is called only for TransactionalTable
+ .setIsTransactional(true)
+ .setIsDynamicPartitionWrite(isDynamicPartitionWrite)
+ .setLevel(LockLevel.TABLE);
- builder.setDbName(table.getSchemaName());
- builder.setTableName(table.getTableName());
- partitionName.ifPresent(builder::setPartitionName);
+ partitionName.ifPresent(name -> component
+ .setPartitionname(name)
+ .setLevel(LockLevel.PARTITION));
- // acquire locks is called only for TransactionalTable
- builder.setIsTransactional(true);
- builder.setIsDynamicPartitionWrite(isDynamicPartitionWrite);
- return builder.build();
+ return component;
+ }
+
+ private static String getLocalHostName()
+ {
+ String hostName;
+ try {
+ hostName = InetAddress.getLocalHost().getHostName();
+ }
+ catch (UnknownHostException e) {
+ throw new RuntimeException("Unable to determine local host", e);
+ }
+ return hostName;
}
@Override
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreClient.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreClient.java
index 1ede67af3116..80e94ac172bb 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreClient.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftHiveMetastoreClient.java
@@ -16,57 +16,54 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import io.airlift.log.Logger;
+import io.trino.hive.thrift.metastore.AbortTxnRequest;
+import io.trino.hive.thrift.metastore.AddDynamicPartitions;
+import io.trino.hive.thrift.metastore.AllocateTableWriteIdsRequest;
+import io.trino.hive.thrift.metastore.AllocateTableWriteIdsResponse;
+import io.trino.hive.thrift.metastore.AlterPartitionsRequest;
+import io.trino.hive.thrift.metastore.AlterTableRequest;
+import io.trino.hive.thrift.metastore.CheckLockRequest;
+import io.trino.hive.thrift.metastore.ClientCapabilities;
+import io.trino.hive.thrift.metastore.ClientCapability;
+import io.trino.hive.thrift.metastore.ColumnStatistics;
+import io.trino.hive.thrift.metastore.ColumnStatisticsDesc;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.CommitTxnRequest;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.EnvironmentContext;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.GetPrincipalsInRoleRequest;
+import io.trino.hive.thrift.metastore.GetPrincipalsInRoleResponse;
+import io.trino.hive.thrift.metastore.GetRoleGrantsForPrincipalRequest;
+import io.trino.hive.thrift.metastore.GetRoleGrantsForPrincipalResponse;
+import io.trino.hive.thrift.metastore.GetTableRequest;
+import io.trino.hive.thrift.metastore.GetValidWriteIdsRequest;
+import io.trino.hive.thrift.metastore.GrantRevokePrivilegeRequest;
+import io.trino.hive.thrift.metastore.GrantRevokeRoleRequest;
+import io.trino.hive.thrift.metastore.GrantRevokeRoleResponse;
+import io.trino.hive.thrift.metastore.HeartbeatTxnRangeRequest;
+import io.trino.hive.thrift.metastore.HiveObjectPrivilege;
+import io.trino.hive.thrift.metastore.HiveObjectRef;
+import io.trino.hive.thrift.metastore.LockRequest;
+import io.trino.hive.thrift.metastore.LockResponse;
+import io.trino.hive.thrift.metastore.MetaException;
+import io.trino.hive.thrift.metastore.NoSuchObjectException;
+import io.trino.hive.thrift.metastore.OpenTxnRequest;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PartitionsStatsRequest;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.PrivilegeBag;
+import io.trino.hive.thrift.metastore.Role;
+import io.trino.hive.thrift.metastore.RolePrincipalGrant;
+import io.trino.hive.thrift.metastore.Table;
+import io.trino.hive.thrift.metastore.TableStatsRequest;
+import io.trino.hive.thrift.metastore.TableValidWriteIds;
+import io.trino.hive.thrift.metastore.ThriftHiveMetastore;
+import io.trino.hive.thrift.metastore.TxnToWriteId;
+import io.trino.hive.thrift.metastore.UnlockRequest;
import io.trino.plugin.base.util.LoggingInvocationHandler;
-import io.trino.plugin.base.util.LoggingInvocationHandler.AirliftParameterNamesProvider;
-import io.trino.plugin.base.util.LoggingInvocationHandler.ParameterNamesProvider;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.plugin.hive.metastore.thrift.MetastoreSupportsDateStatistics.DateStatisticsSupport;
-import org.apache.hadoop.hive.common.ValidTxnList;
-import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
-import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
-import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest;
-import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsResponse;
-import org.apache.hadoop.hive.metastore.api.AlterPartitionsRequest;
-import org.apache.hadoop.hive.metastore.api.AlterTableRequest;
-import org.apache.hadoop.hive.metastore.api.CheckLockRequest;
-import org.apache.hadoop.hive.metastore.api.ClientCapabilities;
-import org.apache.hadoop.hive.metastore.api.ClientCapability;
-import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
-import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
-import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
-import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
-import org.apache.hadoop.hive.metastore.api.GetTableRequest;
-import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsRequest;
-import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
-import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleRequest;
-import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleResponse;
-import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PartitionsStatsRequest;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.Role;
-import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
-import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
-import org.apache.hadoop.hive.metastore.api.UnlockRequest;
-import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.apache.thrift.TApplicationException;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -87,17 +84,18 @@
import static com.google.common.base.Verify.verifyNotNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.reflect.Reflection.newProxy;
+import static io.trino.hive.thrift.metastore.GrantRevokeType.GRANT;
+import static io.trino.hive.thrift.metastore.GrantRevokeType.REVOKE;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS;
import static io.trino.plugin.hive.TableType.VIRTUAL_VIEW;
import static io.trino.plugin.hive.ViewReaderUtil.PRESTO_VIEW_FLAG;
import static io.trino.plugin.hive.metastore.thrift.MetastoreSupportsDateStatistics.DateStatisticsSupport.NOT_SUPPORTED;
import static io.trino.plugin.hive.metastore.thrift.MetastoreSupportsDateStatistics.DateStatisticsSupport.SUPPORTED;
import static io.trino.plugin.hive.metastore.thrift.MetastoreSupportsDateStatistics.DateStatisticsSupport.UNKNOWN;
+import static io.trino.plugin.hive.metastore.thrift.TxnUtils.createValidReadTxnList;
+import static io.trino.plugin.hive.metastore.thrift.TxnUtils.createValidTxnWriteIdList;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
-import static org.apache.hadoop.hive.metastore.api.GrantRevokeType.GRANT;
-import static org.apache.hadoop.hive.metastore.api.GrantRevokeType.REVOKE;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.HIVE_FILTER_FIELD_PARAMS;
-import static org.apache.hadoop.hive.metastore.txn.TxnUtils.createValidTxnWriteIdList;
import static org.apache.thrift.TApplicationException.UNKNOWN_METHOD;
public class ThriftHiveMetastoreClient
@@ -105,8 +103,6 @@ public class ThriftHiveMetastoreClient
{
private static final Logger log = Logger.get(ThriftHiveMetastoreClient.class);
- private static final ParameterNamesProvider PARAMETER_NAMES_PROVIDER = new AirliftParameterNamesProvider(ThriftHiveMetastore.Iface.class, ThriftHiveMetastore.Client.class);
-
private static final Pattern TABLE_PARAMETER_SAFE_KEY_PATTERN = Pattern.compile("^[a-zA-Z_]+$");
private static final Pattern TABLE_PARAMETER_SAFE_VALUE_PATTERN = Pattern.compile("^[a-zA-Z0-9\\s]*$");
@@ -151,7 +147,7 @@ private void connect()
transport = transportSupplier.createTransport();
ThriftHiveMetastore.Iface client = new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
if (log.isDebugEnabled()) {
- client = newProxy(ThriftHiveMetastore.Iface.class, new LoggingInvocationHandler(client, PARAMETER_NAMES_PROVIDER, log::debug));
+ client = newProxy(ThriftHiveMetastore.Iface.class, new LoggingInvocationHandler(client, log::debug));
}
this.client = client;
}
@@ -171,21 +167,21 @@ private void disconnect()
public List<String> getAllDatabases()
throws TException
{
- return client.get_all_databases();
+ return client.getAllDatabases();
}
@Override
public Database getDatabase(String dbName)
throws TException
{
- return client.get_database(dbName);
+ return client.getDatabase(dbName);
}
@Override
public List<String> getAllTables(String databaseName)
throws TException
{
- return client.get_all_tables(databaseName);
+ return client.getAllTables(databaseName);
}
@Override
@@ -195,7 +191,7 @@ public List getAllViews(String databaseName)
return alternativeCall(
exception -> !isUnknownMethodExceptionalResponse(exception),
chosenGetAllViewsAlternative,
- () -> client.get_tables_by_type(databaseName, ".*", VIRTUAL_VIEW.name()),
+ () -> client.getTablesByType(databaseName, ".*", VIRTUAL_VIEW.name()),
// fallback to enumerating Presto views only (Hive views can still be executed, but will be listed as tables and not views)
() -> getTablesWithParameter(databaseName, PRESTO_VIEW_FLAG, "true"));
}
@@ -224,50 +220,50 @@ public List getTablesWithParameter(String databaseName, String parameter
return alternativeCall(
ThriftHiveMetastoreClient::defaultIsValidExceptionalResponse,
chosenTableParamAlternative,
- () -> client.get_table_names_by_filter(databaseName, filterWithEquals, (short) -1),
- () -> client.get_table_names_by_filter(databaseName, filterWithLike, (short) -1));
+ () -> client.getTableNamesByFilter(databaseName, filterWithEquals, (short) -1),
+ () -> client.getTableNamesByFilter(databaseName, filterWithLike, (short) -1));
}
@Override
public void createDatabase(Database database)
throws TException
{
- client.create_database(database);
+ client.createDatabase(database);
}
@Override
public void dropDatabase(String databaseName, boolean deleteData, boolean cascade)
throws TException
{
- client.drop_database(databaseName, deleteData, cascade);
+ client.dropDatabase(databaseName, deleteData, cascade);
}
@Override
public void alterDatabase(String databaseName, Database database)
throws TException
{
- client.alter_database(databaseName, database);
+ client.alterDatabase(databaseName, database);
}
@Override
public void createTable(Table table)
throws TException
{
- client.create_table(table);
+ client.createTable(table);
}
@Override
public void dropTable(String databaseName, String name, boolean deleteData)
throws TException
{
- client.drop_table(databaseName, name, deleteData);
+ client.dropTable(databaseName, name, deleteData);
}
@Override
public void alterTableWithEnvironmentContext(String databaseName, String tableName, Table newTable, EnvironmentContext context)
throws TException
{
- client.alter_table_with_environment_context(databaseName, tableName, newTable, context);
+ client.alterTableWithEnvironmentContext(databaseName, tableName, newTable, context);
}
@Override
@@ -280,16 +276,16 @@ public Table getTable(String databaseName, String tableName)
() -> {
GetTableRequest request = new GetTableRequest(databaseName, tableName);
request.setCapabilities(new ClientCapabilities(ImmutableList.of(ClientCapability.INSERT_ONLY_TABLES)));
- return client.get_table_req(request).getTable();
+ return client.getTableReq(request).getTable();
},
- () -> client.get_table(databaseName, tableName));
+ () -> client.getTable(databaseName, tableName));
}
@Override
public List<FieldSchema> getFields(String databaseName, String tableName)
throws TException
{
- return client.get_fields(databaseName, tableName);
+ return client.getFields(databaseName, tableName);
}
@Override
@@ -297,7 +293,7 @@ public List<ColumnStatisticsObj> getTableColumnStatistics(String databaseName, S
throws TException
{
TableStatsRequest tableStatsRequest = new TableStatsRequest(databaseName, tableName, columnNames);
- return client.get_table_statistics_req(tableStatsRequest).getTableStats();
+ return client.getTableStatisticsReq(tableStatsRequest).getTableStats();
}
@Override
@@ -310,7 +306,7 @@ public void setTableColumnStatistics(String databaseName, String tableName, List
stats -> {
ColumnStatisticsDesc statisticsDescription = new ColumnStatisticsDesc(true, databaseName, tableName);
ColumnStatistics request = new ColumnStatistics(statisticsDescription, stats);
- client.update_table_column_statistics(request);
+ client.updateTableColumnStatistics(request);
});
}
@@ -318,7 +314,7 @@ public void setTableColumnStatistics(String databaseName, String tableName, List
public void deleteTableColumnStatistics(String databaseName, String tableName, String columnName)
throws TException
{
- client.delete_table_column_statistics(databaseName, tableName, columnName);
+ client.deleteTableColumnStatistics(databaseName, tableName, columnName);
}
@Override
@@ -326,7 +322,7 @@ public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(Strin
throws TException
{
PartitionsStatsRequest partitionsStatsRequest = new PartitionsStatsRequest(databaseName, tableName, columnNames, partitionNames);
- return client.get_partitions_statistics_req(partitionsStatsRequest).getPartStats();
+ return client.getPartitionsStatisticsReq(partitionsStatsRequest).getPartStats();
}
@Override
@@ -340,7 +336,7 @@ public void setPartitionColumnStatistics(String databaseName, String tableName,
ColumnStatisticsDesc statisticsDescription = new ColumnStatisticsDesc(false, databaseName, tableName);
statisticsDescription.setPartName(partitionName);
ColumnStatistics request = new ColumnStatistics(statisticsDescription, stats);
- client.update_partition_column_statistics(request);
+ client.updatePartitionColumnStatistics(request);
});
}
@@ -348,7 +344,7 @@ public void setPartitionColumnStatistics(String databaseName, String tableName,
public void deletePartitionColumnStatistics(String databaseName, String tableName, String partitionName, String columnName)
throws TException
{
- client.delete_partition_column_statistics(databaseName, tableName, partitionName, columnName);
+ client.deletePartitionColumnStatistics(databaseName, tableName, partitionName, columnName);
}
private void setColumnStatistics(String objectName, List<ColumnStatisticsObj> statistics, UnaryCall<List<ColumnStatisticsObj>> saveColumnStatistics)
@@ -403,70 +399,70 @@ private void setColumnStatistics(String objectName, List st
public List<String> getPartitionNames(String databaseName, String tableName)
throws TException
{
- return client.get_partition_names(databaseName, tableName, (short) -1);
+ return client.getPartitionNames(databaseName, tableName, (short) -1);
}
@Override
public List<String> getPartitionNamesFiltered(String databaseName, String tableName, List<String> partitionValues)
throws TException
{
- return client.get_partition_names_ps(databaseName, tableName, partitionValues, (short) -1);
+ return client.getPartitionNamesPs(databaseName, tableName, partitionValues, (short) -1);
}
@Override
public int addPartitions(List<Partition> newPartitions)
throws TException
{
- return client.add_partitions(newPartitions);
+ return client.addPartitions(newPartitions);
}
@Override
public boolean dropPartition(String databaseName, String tableName, List<String> partitionValues, boolean deleteData)
throws TException
{
- return client.drop_partition(databaseName, tableName, partitionValues, deleteData);
+ return client.dropPartition(databaseName, tableName, partitionValues, deleteData);
}
@Override
public void alterPartition(String databaseName, String tableName, Partition partition)
throws TException
{
- client.alter_partition(databaseName, tableName, partition);
+ client.alterPartition(databaseName, tableName, partition);
}
@Override
public Partition getPartition(String databaseName, String tableName, List<String> partitionValues)
throws TException
{
- return client.get_partition(databaseName, tableName, partitionValues);
+ return client.getPartition(databaseName, tableName, partitionValues);
}
@Override
public List<Partition> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames)
throws TException
{
- return client.get_partitions_by_names(databaseName, tableName, partitionNames);
+ return client.getPartitionsByNames(databaseName, tableName, partitionNames);
}
@Override
public List<Role> listRoles(String principalName, PrincipalType principalType)
throws TException
{
- return client.list_roles(principalName, principalType);
+ return client.listRoles(principalName, principalType);
}
@Override
public List<HiveObjectPrivilege> listPrivileges(String principalName, PrincipalType principalType, HiveObjectRef hiveObjectRef)
throws TException
{
- return client.list_privileges(principalName, principalType, hiveObjectRef);
+ return client.listPrivileges(principalName, principalType, hiveObjectRef);
}
@Override
public List<String> getRoleNames()
throws TException
{
- return client.get_role_names();
+ return client.getRoleNames();
}
@Override
@@ -474,21 +470,21 @@ public void createRole(String roleName, String grantor)
throws TException
{
Role role = new Role(roleName, 0, grantor);
- client.create_role(role);
+ client.createRole(role);
}
@Override
public void dropRole(String role)
throws TException
{
- client.drop_role(role);
+ client.dropRole(role);
}
@Override
public boolean grantPrivileges(PrivilegeBag privilegeBag)
throws TException
{
- return client.grant_revoke_privileges(new GrantRevokePrivilegeRequest(GRANT, privilegeBag)).isSuccess();
+ return client.grantRevokePrivileges(new GrantRevokePrivilegeRequest(GRANT, privilegeBag)).isSuccess();
}
@Override
@@ -497,7 +493,7 @@ public boolean revokePrivileges(PrivilegeBag privilegeBag, boolean revokeGrantOp
{
GrantRevokePrivilegeRequest grantRevokePrivilegeRequest = new GrantRevokePrivilegeRequest(REVOKE, privilegeBag);
grantRevokePrivilegeRequest.setRevokeGrantOption(revokeGrantOption);
- return client.grant_revoke_privileges(grantRevokePrivilegeRequest).isSuccess();
+ return client.grantRevokePrivileges(grantRevokePrivilegeRequest).isSuccess();
}
@Override
@@ -530,7 +526,7 @@ private void createGrant(String role, String granteeName, PrincipalType granteeT
request.setGrantor(grantorName);
request.setGrantorType(grantorType);
request.setGrantOption(grantOption);
- GrantRevokeRoleResponse response = client.grant_revoke_role(request);
+ GrantRevokeRoleResponse response = client.grantRevokeRole(request);
if (!response.isSetSuccess()) {
throw new MetaException("GrantRevokeResponse missing success field");
}
@@ -569,7 +565,7 @@ private void removeGrant(String role, String granteeName, PrincipalType granteeT
request.setPrincipalName(granteeName);
request.setPrincipalType(granteeType);
request.setGrantOption(grantOption);
- GrantRevokeRoleResponse response = client.grant_revoke_role(request);
+ GrantRevokeRoleResponse response = client.grantRevokeRole(request);
if (!response.isSetSuccess()) {
throw new MetaException("GrantRevokeResponse missing success field");
}
@@ -580,7 +576,7 @@ public List<RolePrincipalGrant> listGrantedPrincipals(String role)
throws TException
{
GetPrincipalsInRoleRequest request = new GetPrincipalsInRoleRequest(role);
- GetPrincipalsInRoleResponse response = client.get_principals_in_role(request);
+ GetPrincipalsInRoleResponse response = client.getPrincipalsInRole(request);
return ImmutableList.copyOf(response.getPrincipalGrants());
}
@@ -589,7 +585,7 @@ public List<RolePrincipalGrant> listRoleGrants(String principalName, PrincipalTy
throws TException
{
GetRoleGrantsForPrincipalRequest request = new GetRoleGrantsForPrincipalRequest(principalName, principalType);
- GetRoleGrantsForPrincipalResponse resp = client.get_role_grants_for_principal(request);
+ GetRoleGrantsForPrincipalResponse resp = client.getRoleGrantsForPrincipal(request);
return ImmutableList.copyOf(resp.getPrincipalGrants());
}
@@ -597,7 +593,7 @@ public List listRoleGrants(String principalName, PrincipalTy
public void setUGI(String userName)
throws TException
{
- client.set_ugi(userName, new ArrayList<>());
+ client.setUgi(userName, new ArrayList<>());
}
@Override
@@ -605,21 +601,21 @@ public long openTransaction(String user)
throws TException
{
OpenTxnRequest request = new OpenTxnRequest(1, user, hostname);
- return client.open_txns(request).getTxn_ids().get(0);
+ return client.openTxns(request).getTxnIds().get(0);
}
@Override
public void commitTransaction(long transactionId)
throws TException
{
- client.commit_txn(new CommitTxnRequest(transactionId));
+ client.commitTxn(new CommitTxnRequest(transactionId));
}
@Override
public void abortTransaction(long transactionId)
throws TException
{
- client.abort_txn(new AbortTxnRequest(transactionId));
+ client.abortTxn(new AbortTxnRequest(transactionId));
}
@Override
@@ -627,7 +623,7 @@ public void sendTransactionHeartbeat(long transactionId)
throws TException
{
HeartbeatTxnRangeRequest request = new HeartbeatTxnRangeRequest(transactionId, transactionId);
- client.heartbeat_txn_range(request);
+ client.heartbeatTxnRange(request);
}
@Override
@@ -641,7 +637,7 @@ public LockResponse acquireLock(LockRequest lockRequest)
public LockResponse checkLock(long lockId)
throws TException
{
- return client.check_lock(new CheckLockRequest(lockId));
+ return client.checkLock(new CheckLockRequest(lockId));
}
@Override
@@ -658,26 +654,24 @@ public String getValidWriteIds(List<String> tableList, long currentTransactionId
// Pass currentTxn as 0L to get the recent snapshot of valid transactions in Hive
// Do not pass currentTransactionId instead as it will break Hive's listing of delta directories if major compaction
// deletes delta directories for valid transactions that existed at the time transaction is opened
- ValidTxnList validTransactions = TxnUtils.createValidReadTxnList(client.get_open_txns(), 0L);
- GetValidWriteIdsRequest request = new GetValidWriteIdsRequest(tableList, validTransactions.toString());
- return createValidTxnWriteIdList(
- currentTransactionId,
- client.get_valid_write_ids(request).getTblValidWriteIds())
- .toString();
+ String validTransactions = createValidReadTxnList(client.getOpenTxns(), 0L);
+ GetValidWriteIdsRequest request = new GetValidWriteIdsRequest(tableList, validTransactions);
+ List<TableValidWriteIds> validWriteIds = client.getValidWriteIds(request).getTblValidWriteIds();
+ return createValidTxnWriteIdList(currentTransactionId, validWriteIds);
}
@Override
public String getConfigValue(String name, String defaultValue)
throws TException
{
- return client.get_config_value(name, defaultValue);
+ return client.getConfigValue(name, defaultValue);
}
@Override
public String getDelegationToken(String userName)
throws TException
{
- return client.get_delegation_token(userName, userName);
+ return client.getDelegationToken(userName, userName);
}
@Override
@@ -686,7 +680,7 @@ public List<TxnToWriteId> allocateTableWriteIds(String dbName, String tableName,
{
AllocateTableWriteIdsRequest request = new AllocateTableWriteIdsRequest(dbName, tableName);
request.setTxnIds(transactionIds);
- AllocateTableWriteIdsResponse response = client.allocate_table_write_ids(request);
+ AllocateTableWriteIdsResponse response = client.allocateTableWriteIds(request);
return response.getTxnToWriteIds();
}
@@ -700,11 +694,11 @@ public void alterPartitions(String dbName, String tableName, List par
() -> {
AlterPartitionsRequest request = new AlterPartitionsRequest(dbName, tableName, partitions);
request.setWriteId(writeId);
- client.alter_partitions_req(request);
+ client.alterPartitionsReq(request);
return null;
},
() -> {
- client.alter_partitions_with_environment_context(dbName, tableName, partitions, new EnvironmentContext());
+ client.alterPartitionsWithEnvironmentContext(dbName, tableName, partitions, new EnvironmentContext());
return null;
});
}
@@ -715,7 +709,7 @@ public void addDynamicPartitions(String dbName, String tableName, List p
{
AddDynamicPartitions request = new AddDynamicPartitions(transactionId, writeId, dbName, tableName, partitionNames);
request.setOperationType(operation.getMetastoreOperationType().orElseThrow());
- client.add_dynamic_partitions(request);
+ client.addDynamicPartitions(request);
}
@Override
@@ -733,12 +727,12 @@ public void alterTransactionalTable(Table table, long transactionId, long writeI
request.setValidWriteIdList(getValidWriteIds(ImmutableList.of(format("%s.%s", table.getDbName(), table.getTableName())), transactionId));
request.setWriteId(writeId);
request.setEnvironmentContext(environmentContext);
- client.alter_table_req(request);
+ client.alterTableReq(request);
return null;
},
() -> {
table.setWriteId(originalWriteId);
- client.alter_table_with_environment_context(table.getDbName(), table.getTableName(), table, environmentContext);
+ client.alterTableWithEnvironmentContext(table.getDbName(), table.getTableName(), table, environmentContext);
return null;
});
}
@@ -801,7 +795,7 @@ private static boolean defaultIsValidExceptionalResponse(Exception exception)
}
if (exception.toString().contains("AccessControlException")) {
- // e.g. org.apache.hadoop.hive.metastore.api.MetaException: org.apache.hadoop.security.AccessControlException: Permission denied: ...
+ // e.g. io.trino.hive.thrift.metastore.MetaException: org.apache.hadoop.security.AccessControlException: Permission denied: ...
return true;
}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastore.java
index ab21a08c00ee..97aa01ac3c26 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastore.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastore.java
@@ -13,6 +13,11 @@
*/
package io.trino.plugin.hive.metastore.thrift;
+import io.trino.hive.thrift.metastore.DataOperationType;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.HivePartition;
import io.trino.plugin.hive.PartitionStatistics;
@@ -29,11 +34,6 @@
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.type.Type;
-import org.apache.hadoop.hive.metastore.api.DataOperationType;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.Table;
import java.util.List;
import java.util.Map;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreApiStats.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreApiStats.java
index 80bbf67f5d9a..b6ecc2ee5d68 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreApiStats.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreApiStats.java
@@ -15,7 +15,7 @@
import io.airlift.stats.CounterStat;
import io.airlift.stats.TimeStat;
-import org.apache.hadoop.hive.metastore.api.MetaException;
+import io.trino.hive.thrift.metastore.MetaException;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.weakref.jmx.Managed;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreClient.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreClient.java
index 3dcc9e7885bb..48510dfb6550 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreClient.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreClient.java
@@ -13,22 +13,22 @@
*/
package io.trino.plugin.hive.metastore.thrift;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.EnvironmentContext;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.HiveObjectPrivilege;
+import io.trino.hive.thrift.metastore.HiveObjectRef;
+import io.trino.hive.thrift.metastore.LockRequest;
+import io.trino.hive.thrift.metastore.LockResponse;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.PrivilegeBag;
+import io.trino.hive.thrift.metastore.Role;
+import io.trino.hive.thrift.metastore.RolePrincipalGrant;
+import io.trino.hive.thrift.metastore.Table;
+import io.trino.hive.thrift.metastore.TxnToWriteId;
import io.trino.plugin.hive.acid.AcidOperation;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.Role;
-import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.api.TxnToWriteId;
import org.apache.thrift.TException;
import java.io.Closeable;
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
index 7f673bec83be..ad710658384b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/ThriftMetastoreUtil.java
@@ -19,6 +19,23 @@
import com.google.common.collect.Streams;
import com.google.common.primitives.Longs;
import com.google.common.primitives.Shorts;
+import io.trino.hive.thrift.metastore.BinaryColumnStatsData;
+import io.trino.hive.thrift.metastore.BooleanColumnStatsData;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.Date;
+import io.trino.hive.thrift.metastore.DateColumnStatsData;
+import io.trino.hive.thrift.metastore.Decimal;
+import io.trino.hive.thrift.metastore.DecimalColumnStatsData;
+import io.trino.hive.thrift.metastore.DoubleColumnStatsData;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.LongColumnStatsData;
+import io.trino.hive.thrift.metastore.Order;
+import io.trino.hive.thrift.metastore.PrincipalPrivilegeSet;
+import io.trino.hive.thrift.metastore.PrivilegeGrantInfo;
+import io.trino.hive.thrift.metastore.RolePrincipalGrant;
+import io.trino.hive.thrift.metastore.SerDeInfo;
+import io.trino.hive.thrift.metastore.StorageDescriptor;
+import io.trino.hive.thrift.metastore.StringColumnStatsData;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.HiveBucketProperty;
import io.trino.plugin.hive.HiveColumnStatisticType;
@@ -48,23 +65,6 @@
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
-import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.Date;
-import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.Decimal;
-import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -94,6 +94,13 @@
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.binaryStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.booleanStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.dateStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.decimalStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.doubleStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.longStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.stringStats;
import static io.trino.plugin.hive.HiveColumnStatisticType.MAX_VALUE;
import static io.trino.plugin.hive.HiveColumnStatisticType.MAX_VALUE_SIZE_IN_BYTES;
import static io.trino.plugin.hive.HiveColumnStatisticType.MIN_VALUE;
@@ -133,13 +140,6 @@
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.binaryStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.booleanStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.dateStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.decimalStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.doubleStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.longStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.stringStats;
import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category.PRIMITIVE;
public final class ThriftMetastoreUtil
@@ -154,9 +154,9 @@ public final class ThriftMetastoreUtil
private ThriftMetastoreUtil() {}
- public static org.apache.hadoop.hive.metastore.api.Database toMetastoreApiDatabase(Database database)
+ public static io.trino.hive.thrift.metastore.Database toMetastoreApiDatabase(Database database)
{
- org.apache.hadoop.hive.metastore.api.Database result = new org.apache.hadoop.hive.metastore.api.Database();
+ io.trino.hive.thrift.metastore.Database result = new io.trino.hive.thrift.metastore.Database();
result.setName(database.getDatabaseName());
database.getLocation().ifPresent(result::setLocationUri);
result.setOwnerName(database.getOwnerName().orElse(null));
@@ -167,16 +167,16 @@ public static org.apache.hadoop.hive.metastore.api.Database toMetastoreApiDataba
return result;
}
- public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(Table table, PrincipalPrivileges privileges)
+ public static io.trino.hive.thrift.metastore.Table toMetastoreApiTable(Table table, PrincipalPrivileges privileges)
{
- org.apache.hadoop.hive.metastore.api.Table result = toMetastoreApiTable(table);
+ io.trino.hive.thrift.metastore.Table result = toMetastoreApiTable(table);
result.setPrivileges(toMetastoreApiPrincipalPrivilegeSet(privileges));
return result;
}
- public static org.apache.hadoop.hive.metastore.api.Table toMetastoreApiTable(Table table)
+ public static io.trino.hive.thrift.metastore.Table toMetastoreApiTable(Table table)
{
- org.apache.hadoop.hive.metastore.api.Table result = new org.apache.hadoop.hive.metastore.api.Table();
+ io.trino.hive.thrift.metastore.Table result = new io.trino.hive.thrift.metastore.Table();
result.setDbName(table.getDatabaseName());
result.setTableName(table.getTableName());
result.setOwner(table.getOwner().orElse(null));
@@ -323,21 +323,21 @@ public static Stream listEnabledRoles(ConnectorIdentity identity, Functi
.distinct();
}
- public static org.apache.hadoop.hive.metastore.api.Partition toMetastoreApiPartition(PartitionWithStatistics partitionWithStatistics)
+ public static io.trino.hive.thrift.metastore.Partition toMetastoreApiPartition(PartitionWithStatistics partitionWithStatistics)
{
- org.apache.hadoop.hive.metastore.api.Partition partition = toMetastoreApiPartition(partitionWithStatistics.getPartition());
+ io.trino.hive.thrift.metastore.Partition partition = toMetastoreApiPartition(partitionWithStatistics.getPartition());
partition.setParameters(updateStatisticsParameters(partition.getParameters(), partitionWithStatistics.getStatistics().getBasicStatistics()));
return partition;
}
- public static org.apache.hadoop.hive.metastore.api.Partition toMetastoreApiPartition(Partition partition)
+ public static io.trino.hive.thrift.metastore.Partition toMetastoreApiPartition(Partition partition)
{
return toMetastoreApiPartition(partition, Optional.empty());
}
- public static org.apache.hadoop.hive.metastore.api.Partition toMetastoreApiPartition(Partition partition, Optional writeId)
+ public static io.trino.hive.thrift.metastore.Partition toMetastoreApiPartition(Partition partition, Optional writeId)
{
- org.apache.hadoop.hive.metastore.api.Partition result = new org.apache.hadoop.hive.metastore.api.Partition();
+ io.trino.hive.thrift.metastore.Partition result = new io.trino.hive.thrift.metastore.Partition();
result.setDbName(partition.getDatabaseName());
result.setTableName(partition.getTableName());
result.setValues(partition.getValues());
@@ -347,7 +347,7 @@ public static org.apache.hadoop.hive.metastore.api.Partition toMetastoreApiParti
return result;
}
- public static Database fromMetastoreApiDatabase(org.apache.hadoop.hive.metastore.api.Database database)
+ public static Database fromMetastoreApiDatabase(io.trino.hive.thrift.metastore.Database database)
{
String ownerName = "PUBLIC";
PrincipalType ownerType = ROLE;
@@ -371,7 +371,7 @@ public static Database fromMetastoreApiDatabase(org.apache.hadoop.hive.metastore
.build();
}
- public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.Table table)
+ public static Table fromMetastoreApiTable(io.trino.hive.thrift.metastore.Table table)
{
StorageDescriptor storageDescriptor = table.getSd();
if (storageDescriptor == null) {
@@ -380,7 +380,7 @@ public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.T
return fromMetastoreApiTable(table, storageDescriptor.getCols());
}
- public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.Table table, List<FieldSchema> schema)
+ public static Table fromMetastoreApiTable(io.trino.hive.thrift.metastore.Table table, List<FieldSchema> schema)
{
StorageDescriptor storageDescriptor = table.getSd();
if (storageDescriptor == null) {
@@ -408,7 +408,7 @@ public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.T
return tableBuilder.build();
}
- public static boolean isAvroTableWithSchemaSet(org.apache.hadoop.hive.metastore.api.Table table)
+ public static boolean isAvroTableWithSchemaSet(io.trino.hive.thrift.metastore.Table table)
{
if (table.getParameters() == null) {
return false;
@@ -423,7 +423,7 @@ public static boolean isAvroTableWithSchemaSet(org.apache.hadoop.hive.metastore.
serdeInfo.getSerializationLib().equals(AVRO.getSerde());
}
- public static boolean isCsvTable(org.apache.hadoop.hive.metastore.api.Table table)
+ public static boolean isCsvTable(io.trino.hive.thrift.metastore.Table table)
{
return CSV.getSerde().equals(getSerdeInfo(table).getSerializationLib());
}
@@ -435,7 +435,7 @@ public static List csvSchemaFields(List schemas)
.collect(toImmutableList());
}
- private static SerDeInfo getSerdeInfo(org.apache.hadoop.hive.metastore.api.Table table)
+ private static SerDeInfo getSerdeInfo(io.trino.hive.thrift.metastore.Table table)
{
StorageDescriptor storageDescriptor = table.getSd();
if (storageDescriptor == null) {
@@ -449,7 +449,7 @@ private static SerDeInfo getSerdeInfo(org.apache.hadoop.hive.metastore.api.Table
return serdeInfo;
}
- public static Partition fromMetastoreApiPartition(org.apache.hadoop.hive.metastore.api.Partition partition)
+ public static Partition fromMetastoreApiPartition(io.trino.hive.thrift.metastore.Partition partition)
{
StorageDescriptor storageDescriptor = partition.getSd();
if (storageDescriptor == null) {
@@ -459,7 +459,7 @@ public static Partition fromMetastoreApiPartition(org.apache.hadoop.hive.metasto
return fromMetastoreApiPartition(partition, storageDescriptor.getCols());
}
- public static Partition fromMetastoreApiPartition(org.apache.hadoop.hive.metastore.api.Partition partition, List<FieldSchema> schema)
+ public static Partition fromMetastoreApiPartition(io.trino.hive.thrift.metastore.Partition partition, List<FieldSchema> schema)
{
StorageDescriptor storageDescriptor = partition.getSd();
if (storageDescriptor == null) {
@@ -639,15 +639,15 @@ private static RoleGrant fromRolePrincipalGrant(RolePrincipalGrant grant)
grant.isGrantOption());
}
- public static org.apache.hadoop.hive.metastore.api.PrincipalType fromTrinoPrincipalType(PrincipalType principalType)
+ public static io.trino.hive.thrift.metastore.PrincipalType fromTrinoPrincipalType(PrincipalType principalType)
{
return switch (principalType) {
- case USER -> org.apache.hadoop.hive.metastore.api.PrincipalType.USER;
- case ROLE -> org.apache.hadoop.hive.metastore.api.PrincipalType.ROLE;
+ case USER -> io.trino.hive.thrift.metastore.PrincipalType.USER;
+ case ROLE -> io.trino.hive.thrift.metastore.PrincipalType.ROLE;
};
}
- public static PrincipalType fromMetastoreApiPrincipalType(org.apache.hadoop.hive.metastore.api.PrincipalType principalType)
+ public static PrincipalType fromMetastoreApiPrincipalType(io.trino.hive.thrift.metastore.PrincipalType principalType)
{
requireNonNull(principalType, "principalType is null");
switch (principalType) {
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
index 523ca5e24914..f2f62cf47426 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/Transport.java
@@ -96,59 +96,16 @@ private static TTransportException rewriteException(TTransportException e, HostA
}
private static class TTransportWrapper
- extends TTransport
+ extends TFilterTransport
{
- private final TTransport transport;
private final HostAndPort address;
- TTransportWrapper(TTransport transport, HostAndPort address)
+ public TTransportWrapper(TTransport transport, HostAndPort address)
{
- this.transport = requireNonNull(transport, "transport is null");
+ super(transport);
this.address = requireNonNull(address, "address is null");
}
- @Override
- public boolean isOpen()
- {
- return transport.isOpen();
- }
-
- @Override
- public boolean peek()
- {
- return transport.peek();
- }
-
- @Override
- public byte[] getBuffer()
- {
- return transport.getBuffer();
- }
-
- @Override
- public int getBufferPosition()
- {
- return transport.getBufferPosition();
- }
-
- @Override
- public int getBytesRemainingInBuffer()
- {
- return transport.getBytesRemainingInBuffer();
- }
-
- @Override
- public void consumeBuffer(int len)
- {
- transport.consumeBuffer(len);
- }
-
- @Override
- public void close()
- {
- transport.close();
- }
-
@Override
public void open()
throws TTransportException
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TxnUtils.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TxnUtils.java
new file mode 100644
index 000000000000..0565820b7381
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/thrift/TxnUtils.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.thrift;
+
+import io.trino.hive.thrift.metastore.GetOpenTxnsResponse;
+import io.trino.hive.thrift.metastore.TableValidWriteIds;
+
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.StringJoiner;
+
+import static java.lang.Math.abs;
+import static java.lang.Math.min;
+import static java.util.Collections.binarySearch;
+import static java.util.Objects.requireNonNullElse;
+
+// based on org.apache.hadoop.hive.metastore.txn.TxnUtils
+public final class TxnUtils
+{
+ private TxnUtils() {}
+
+ public static String createValidReadTxnList(GetOpenTxnsResponse txns, long currentTxn)
+ {
+ List<Long> openTxns = txns.getOpenTxns();
+ int sizeToHwm = (currentTxn > 0) ? binarySearch(openTxns, currentTxn) : openTxns.size();
+ long[] exceptions = new long[abs(sizeToHwm)];
+ BitSet inAbortedBits = BitSet.valueOf(txns.getAbortedBits());
+ BitSet outAbortedBits = new BitSet();
+ long minOpenTxnId = Long.MAX_VALUE;
+ int i = 0;
+ for (long txn : openTxns) {
+ if ((currentTxn > 0) && (txn >= currentTxn)) {
+ break;
+ }
+ if (inAbortedBits.get(i)) {
+ outAbortedBits.set(i);
+ }
+ else if (minOpenTxnId == Long.MAX_VALUE) {
+ minOpenTxnId = txn;
+ }
+ exceptions[i] = txn;
+ i++;
+ }
+
+ long highWaterMark = (currentTxn > 0) ? min(currentTxn, txns.getTxnHighWaterMark()) : txns.getTxnHighWaterMark();
+
+ StringBuilder builder = new StringBuilder()
+ .append(highWaterMark)
+ .append(':')
+ .append((exceptions.length > 0) ? minOpenTxnId : Long.MAX_VALUE);
+
+ appendTransactions(builder, exceptions, outAbortedBits);
+
+ return builder.toString();
+ }
+
+ public static String createValidTxnWriteIdList(long currentTxnId, List<TableValidWriteIds> validIds)
+ {
+ Map<String, String> lists = new HashMap<>();
+ for (TableValidWriteIds ids : validIds) {
+ lists.put(ids.getFullTableName(), createValidWriteIdList(ids));
+ }
+
+ StringJoiner joiner = new StringJoiner("$")
+ .add(String.valueOf(currentTxnId));
+ // the result depends on HashMap iteration order (matches Hive behavior)
+ lists.values().forEach(joiner::add);
+ return joiner.toString();
+ }
+
+ public static String createValidWriteIdList(TableValidWriteIds writeIds)
+ {
+ List<Long> invalids = writeIds.getInvalidWriteIds();
+ BitSet abortedBits = BitSet.valueOf(writeIds.getAbortedBits());
+ long[] exceptions = new long[invalids.size()];
+ int i = 0;
+ for (long writeId : invalids) {
+ exceptions[i] = writeId;
+ i++;
+ }
+
+ long minOpenWriteId = writeIds.isSetMinOpenWriteId() ? writeIds.getMinOpenWriteId() : Long.MAX_VALUE;
+
+ StringBuilder builder = new StringBuilder()
+ .append(requireNonNullElse(writeIds.getFullTableName(), "null"))
+ .append(':').append(writeIds.getWriteIdHighWaterMark())
+ .append(':').append((exceptions.length > 0) ? minOpenWriteId : Long.MAX_VALUE);
+
+ appendTransactions(builder, exceptions, abortedBits);
+
+ return builder.toString();
+ }
+
+ private static void appendTransactions(StringBuilder builder, long[] exceptions, BitSet abortedBits)
+ {
+ StringBuilder open = new StringBuilder();
+ StringBuilder abort = new StringBuilder();
+ for (int i = 0; i < exceptions.length; i++) {
+ if (abortedBits.get(i)) {
+ if (!abort.isEmpty()) {
+ abort.append(',');
+ }
+ abort.append(exceptions[i]);
+ }
+ else {
+ if (!open.isEmpty()) {
+ open.append(',');
+ }
+ open.append(exceptions[i]);
+ }
+ }
+ builder.append(':').append(open);
+ builder.append(':').append(abort);
+ }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveBucketing.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveBucketing.java
index d3068ad8867c..711d8a8e67bf 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveBucketing.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveBucketing.java
@@ -54,6 +54,7 @@
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Lists.cartesianProduct;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.TABLE_BUCKETING_VERSION;
import static io.trino.plugin.hive.HiveColumnHandle.BUCKET_COLUMN_NAME;
import static io.trino.plugin.hive.HiveErrorCode.HIVE_INVALID_METADATA;
import static io.trino.plugin.hive.HiveSessionProperties.getTimestampPrecision;
@@ -64,7 +65,6 @@
import static java.lang.String.format;
import static java.util.Map.Entry;
import static java.util.function.Function.identity;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_BUCKETING_VERSION;
public final class HiveBucketing
{
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
index efc5899c6965..aae17e135c8f 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/util/HiveUtil.java
@@ -106,6 +106,7 @@
import static io.airlift.slice.Slices.utf8Slice;
import static io.trino.hdfs.ConfigurationUtils.copy;
import static io.trino.hdfs.ConfigurationUtils.toJobConf;
+import static io.trino.hive.thrift.metastore.hive_metastoreConstants.FILE_INPUT_FORMAT;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.PARTITION_KEY;
import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR;
import static io.trino.plugin.hive.HiveColumnHandle.bucketColumnHandle;
@@ -171,7 +172,6 @@
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
-import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.FILE_INPUT_FORMAT;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS;
import static org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR;
import static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java
index 64f2a1b66afa..b11bd60d836c 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java
@@ -57,7 +57,7 @@ public void testAnalyzeCorruptColumnStatisticsOnEmptyTable()
// ANALYZE and drop_stats are unsupported for tables having broken column statistics
assertThatThrownBy(() -> query("ANALYZE " + tableName))
- .hasMessage(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint().toString()) // Thrift metastore doesn't throw helpful message
+ .hasMessage("%s: Socket is closed by peer.", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
.hasStackTraceContaining("ThriftHiveMetastore.setTableColumnStatistics");
assertThatThrownBy(() -> query("CALL system.drop_stats('tpch', '" + tableName + "')"))
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/TestMetastoreUtil.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/TestMetastoreUtil.java
index f7158cd753af..6fa47eb8d48a 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/TestMetastoreUtil.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/TestMetastoreUtil.java
@@ -15,22 +15,22 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.Order;
+import io.trino.hive.thrift.metastore.PrincipalPrivilegeSet;
+import io.trino.hive.thrift.metastore.SerDeInfo;
+import io.trino.hive.thrift.metastore.SkewedInfo;
+import io.trino.hive.thrift.metastore.StorageDescriptor;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.Range;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.predicate.ValueSet;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Order;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.SkewedInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.testng.annotations.Test;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import java.util.Properties;
@@ -42,7 +42,6 @@
import static io.trino.plugin.hive.metastore.PrincipalPrivileges.NO_PRIVILEGES;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.VarcharType.VARCHAR;
-import static org.apache.hadoop.hive.serde.serdeConstants.COLUMN_NAME_DELIMITER;
import static org.assertj.core.api.Assertions.assertThat;
import static org.testng.Assert.assertEquals;
@@ -63,7 +62,7 @@ public class TestMetastoreUtil
ImmutableList.of("col2", "col3"),
ImmutableList.of(new Order("col2", 1)),
ImmutableMap.of());
- private static final org.apache.hadoop.hive.metastore.api.Table TEST_TABLE = new org.apache.hadoop.hive.metastore.api.Table(
+ private static final io.trino.hive.thrift.metastore.Table TEST_TABLE = new io.trino.hive.thrift.metastore.Table(
"table_name",
"db_name",
"owner_name",
@@ -83,7 +82,7 @@ public class TestMetastoreUtil
TEST_TABLE.setPrivileges(new PrincipalPrivilegeSet(ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of()));
}
- private static final org.apache.hadoop.hive.metastore.api.Partition TEST_PARTITION = new org.apache.hadoop.hive.metastore.api.Partition(
+ private static final io.trino.hive.thrift.metastore.Partition TEST_PARTITION = new io.trino.hive.thrift.metastore.Partition(
ImmutableList.of("pk1v", "pk2v"),
"db_name",
"table_name",
@@ -102,7 +101,7 @@ public class TestMetastoreUtil
ImmutableList.of("col2", "col3"),
ImmutableList.of(new Order("col2", 0), new Order("col3", 1)),
ImmutableMap.of("sk1", "sv1"));
- private static final org.apache.hadoop.hive.metastore.api.Table TEST_TABLE_WITH_UNSUPPORTED_FIELDS = new org.apache.hadoop.hive.metastore.api.Table(
+ private static final io.trino.hive.thrift.metastore.Table TEST_TABLE_WITH_UNSUPPORTED_FIELDS = new io.trino.hive.thrift.metastore.Table(
"table_name",
"db_name",
"owner_name",
@@ -117,7 +116,7 @@ public class TestMetastoreUtil
"view original text",
"view extended text",
"MANAGED_TABLE");
- private static final org.apache.hadoop.hive.metastore.api.Partition TEST_PARTITION_WITH_UNSUPPORTED_FIELDS = new org.apache.hadoop.hive.metastore.api.Partition(
+ private static final io.trino.hive.thrift.metastore.Partition TEST_PARTITION_WITH_UNSUPPORTED_FIELDS = new io.trino.hive.thrift.metastore.Partition(
ImmutableList.of("pk1v", "pk2v"),
"db_name",
"table_name",
@@ -133,11 +132,35 @@ public class TestMetastoreUtil
ImmutableMap.of(ImmutableList.of("val1"), "loc1")));
}
+ // equivalent code:
+ // Properties expected = MetaStoreUtils.getTableMetadata(TEST_TABLE_WITH_UNSUPPORTED_FIELDS);
+ // expected.remove(COLUMN_NAME_DELIMITER);
+ private static final Map<String, String> TEST_TABLE_METADATA = ImmutableMap.<String, String>builder()
+ .put("bucket_count", "100")
+ .put("bucket_field_name", "col2,col3")
+ .put("columns", "col1,col2,col3")
+ .put("columns.comments", "comment1\0\0")
+ .put("columns.types", "bigint:binary:string")
+ .put("file.inputformat", "com.facebook.hive.orc.OrcInputFormat")
+ .put("file.outputformat", "com.facebook.hive.orc.OrcOutputFormat")
+ .put("k1", "v1")
+ .put("k2", "v2")
+ .put("k3", "v3")
+ .put("location", "hdfs://VOL1:9000/db_name/table_name")
+ .put("name", "db_name.table_name")
+ .put("partition_columns", "pk1/pk2")
+ .put("partition_columns.types", "string:string")
+ .put("sdk1", "sdv1")
+ .put("sdk2", "sdv2")
+ .put("serialization.ddl", "struct table_name { i64 col1, binary col2, string col3}")
+ .put("serialization.lib", "com.facebook.hive.orc.OrcSerde")
+ .buildOrThrow();
+
@Test
public void testTableRoundTrip()
{
Table table = ThriftMetastoreUtil.fromMetastoreApiTable(TEST_TABLE, TEST_SCHEMA);
- org.apache.hadoop.hive.metastore.api.Table metastoreApiTable = ThriftMetastoreUtil.toMetastoreApiTable(table, NO_PRIVILEGES);
+ io.trino.hive.thrift.metastore.Table metastoreApiTable = ThriftMetastoreUtil.toMetastoreApiTable(table, NO_PRIVILEGES);
assertEquals(metastoreApiTable, TEST_TABLE);
}
@@ -145,26 +168,22 @@ public void testTableRoundTrip()
public void testPartitionRoundTrip()
{
Partition partition = ThriftMetastoreUtil.fromMetastoreApiPartition(TEST_PARTITION);
- org.apache.hadoop.hive.metastore.api.Partition metastoreApiPartition = ThriftMetastoreUtil.toMetastoreApiPartition(partition);
+ io.trino.hive.thrift.metastore.Partition metastoreApiPartition = ThriftMetastoreUtil.toMetastoreApiPartition(partition);
assertEquals(metastoreApiPartition, TEST_PARTITION);
}
@Test
public void testHiveSchemaTable()
{
- Properties expected = MetaStoreUtils.getTableMetadata(TEST_TABLE_WITH_UNSUPPORTED_FIELDS);
- expected.remove(COLUMN_NAME_DELIMITER);
Properties actual = MetastoreUtil.getHiveSchema(ThriftMetastoreUtil.fromMetastoreApiTable(TEST_TABLE_WITH_UNSUPPORTED_FIELDS, TEST_SCHEMA));
- assertEquals(actual, expected);
+ assertEquals(actual, TEST_TABLE_METADATA);
}
@Test
public void testHiveSchemaPartition()
{
- Properties expected = MetaStoreUtils.getPartitionMetadata(TEST_PARTITION_WITH_UNSUPPORTED_FIELDS, TEST_TABLE_WITH_UNSUPPORTED_FIELDS);
- expected.remove(COLUMN_NAME_DELIMITER);
Properties actual = MetastoreUtil.getHiveSchema(ThriftMetastoreUtil.fromMetastoreApiPartition(TEST_PARTITION_WITH_UNSUPPORTED_FIELDS), ThriftMetastoreUtil.fromMetastoreApiTable(TEST_TABLE_WITH_UNSUPPORTED_FIELDS, TEST_SCHEMA));
- assertEquals(actual, expected);
+ assertEquals(actual, TEST_TABLE_METADATA);
}
@Test
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/InMemoryThriftMetastore.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/InMemoryThriftMetastore.java
index 4241cff4db5e..60dc293605e6 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/InMemoryThriftMetastore.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/InMemoryThriftMetastore.java
@@ -16,6 +16,12 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PrincipalPrivilegeSet;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.HiveColumnStatisticType;
import io.trino.plugin.hive.PartitionStatistics;
import io.trino.plugin.hive.SchemaAlreadyExistsException;
@@ -34,12 +40,6 @@
import io.trino.spi.type.Type;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.Table;
import javax.annotation.concurrent.GuardedBy;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java
index e3302485c50b..acc15c6c59f1 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/MockThriftMetastoreClient.java
@@ -16,30 +16,30 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import io.trino.hive.thrift.metastore.ColumnStatisticsData;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.Database;
+import io.trino.hive.thrift.metastore.EnvironmentContext;
+import io.trino.hive.thrift.metastore.FieldSchema;
+import io.trino.hive.thrift.metastore.HiveObjectPrivilege;
+import io.trino.hive.thrift.metastore.HiveObjectRef;
+import io.trino.hive.thrift.metastore.LockRequest;
+import io.trino.hive.thrift.metastore.LockResponse;
+import io.trino.hive.thrift.metastore.LongColumnStatsData;
+import io.trino.hive.thrift.metastore.NoSuchObjectException;
+import io.trino.hive.thrift.metastore.Partition;
+import io.trino.hive.thrift.metastore.PrincipalType;
+import io.trino.hive.thrift.metastore.PrivilegeBag;
+import io.trino.hive.thrift.metastore.Role;
+import io.trino.hive.thrift.metastore.RolePrincipalGrant;
+import io.trino.hive.thrift.metastore.SerDeInfo;
+import io.trino.hive.thrift.metastore.StorageDescriptor;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.acid.AcidOperation;
import io.trino.testng.services.ManageTestResources;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.Role;
-import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
-import org.apache.hadoop.hive.metastore.api.SerDeInfo;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.thrift.TException;
import java.util.List;
@@ -47,8 +47,8 @@
import java.util.concurrent.atomic.AtomicInteger;
import static com.google.common.collect.ImmutableList.toImmutableList;
-import static org.apache.hadoop.hive.metastore.api.PrincipalType.ROLE;
-import static org.apache.hadoop.hive.metastore.api.PrincipalType.USER;
+import static io.trino.hive.thrift.metastore.PrincipalType.ROLE;
+import static io.trino.hive.thrift.metastore.PrincipalType.USER;
@ManageTestResources.Suppress(because = "close() is no-op and instance's resources are negligible")
public class MockThriftMetastoreClient
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestStaticTokenAwareMetastoreClientFactory.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestStaticTokenAwareMetastoreClientFactory.java
index 7e211bee0695..51901c7711ce 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestStaticTokenAwareMetastoreClientFactory.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestStaticTokenAwareMetastoreClientFactory.java
@@ -17,7 +17,7 @@
import com.google.common.base.Ticker;
import com.google.common.collect.ImmutableMap;
import io.airlift.testing.TestingTicker;
-import org.apache.hadoop.hive.metastore.api.Table;
+import io.trino.hive.thrift.metastore.Table;
import org.apache.thrift.TException;
import org.testng.annotations.Test;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTFilterTransport.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTFilterTransport.java
new file mode 100644
index 000000000000..2c78ba31b6ad
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTFilterTransport.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.thrift;
+
+import org.apache.thrift.transport.TTransport;
+import org.testng.annotations.Test;
+
+import static io.trino.spi.testing.InterfaceTestUtils.assertAllMethodsOverridden;
+
+public class TestTFilterTransport
+{
+ @Test
+ public void testEverythingImplemented()
+ {
+ assertAllMethodsOverridden(TTransport.class, TFilterTransport.class);
+ }
+}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftHiveMetastoreClient.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftHiveMetastoreClient.java
index 667ce505478f..56dfcc9058a6 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftHiveMetastoreClient.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftHiveMetastoreClient.java
@@ -14,9 +14,9 @@
package io.trino.plugin.hive.metastore.thrift;
import io.trino.plugin.hive.metastore.thrift.ThriftHiveMetastoreClient.AlternativeCall;
+import org.apache.thrift.TConfiguration;
import org.apache.thrift.TException;
import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
import org.testng.annotations.Test;
import java.util.concurrent.atomic.AtomicInteger;
@@ -80,7 +80,6 @@ public boolean isOpen()
@Override
public void open()
- throws TTransportException
{
throw new UnsupportedOperationException();
}
@@ -93,14 +92,30 @@ public void close()
@Override
public int read(byte[] bytes, int i, int i1)
- throws TTransportException
{
throw new UnsupportedOperationException();
}
@Override
public void write(byte[] bytes, int i, int i1)
- throws TTransportException
+ {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TConfiguration getConfiguration()
+ {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void updateKnownMessageSize(long size)
+ {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void checkReadBytesAvailable(long numBytes)
{
throw new UnsupportedOperationException();
}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
index 81d4f28c37fe..d62c38186a02 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestThriftMetastoreUtil.java
@@ -16,6 +16,15 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;
+import io.trino.hive.thrift.metastore.BinaryColumnStatsData;
+import io.trino.hive.thrift.metastore.BooleanColumnStatsData;
+import io.trino.hive.thrift.metastore.ColumnStatisticsObj;
+import io.trino.hive.thrift.metastore.Date;
+import io.trino.hive.thrift.metastore.DateColumnStatsData;
+import io.trino.hive.thrift.metastore.DecimalColumnStatsData;
+import io.trino.hive.thrift.metastore.DoubleColumnStatsData;
+import io.trino.hive.thrift.metastore.LongColumnStatsData;
+import io.trino.hive.thrift.metastore.StringColumnStatsData;
import io.trino.plugin.hive.HiveBasicStatistics;
import io.trino.plugin.hive.metastore.BooleanStatistics;
import io.trino.plugin.hive.metastore.DateStatistics;
@@ -26,15 +35,6 @@
import io.trino.plugin.hive.metastore.IntegerStatistics;
import io.trino.spi.security.RoleGrant;
import io.trino.spi.security.TrinoPrincipal;
-import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
-import org.apache.hadoop.hive.metastore.api.Date;
-import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
-import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
import org.testng.annotations.Test;
import java.math.BigDecimal;
@@ -44,19 +44,19 @@
import java.util.OptionalLong;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.binaryStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.booleanStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.dateStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.decimalStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.doubleStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.longStats;
+import static io.trino.hive.thrift.metastore.ColumnStatisticsData.stringStats;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.fromMetastoreApiColumnStatistics;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.getHiveBasicStatistics;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.toMetastoreDecimal;
import static io.trino.plugin.hive.metastore.thrift.ThriftMetastoreUtil.updateStatisticsParameters;
import static io.trino.spi.security.PrincipalType.ROLE;
import static io.trino.spi.security.PrincipalType.USER;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.binaryStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.booleanStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.dateStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.decimalStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.doubleStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.longStats;
-import static org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.stringStats;
import static org.apache.hadoop.hive.serde.serdeConstants.BIGINT_TYPE_NAME;
import static org.apache.hadoop.hive.serde.serdeConstants.BINARY_TYPE_NAME;
import static org.apache.hadoop.hive.serde.serdeConstants.BOOLEAN_TYPE_NAME;
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTxnUtils.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTxnUtils.java
new file mode 100644
index 000000000000..acbbcf78b50a
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestTxnUtils.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.thrift;
+
+import io.trino.hive.thrift.metastore.GetOpenTxnsResponse;
+import io.trino.hive.thrift.metastore.TableValidWriteIds;
+import org.testng.annotations.Test;
+
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+import java.util.List;
+
+import static io.trino.plugin.hive.metastore.thrift.TxnUtils.createValidReadTxnList;
+import static io.trino.plugin.hive.metastore.thrift.TxnUtils.createValidTxnWriteIdList;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestTxnUtils
+{
+ @Test
+ public void testCreateValidReadTxnList()
+ {
+ BitSet aborted = new BitSet();
+ aborted.set(2, 4);
+ ByteBuffer abortedBits = ByteBuffer.wrap(aborted.toByteArray());
+
+ long currentTxn = 7;
+ var trinoResponse = new GetOpenTxnsResponse(6, List.of(1L, 2L, 3L), abortedBits);
+ String trinoValue = createValidReadTxnList(trinoResponse, currentTxn);
+
+ var hiveResponse = new org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse(
+ trinoResponse.getTxnHighWaterMark(),
+ trinoResponse.getOpenTxns(),
+ trinoResponse.bufferForAbortedBits());
+ String hiveValue = org.apache.hadoop.hive.metastore.txn.TxnUtils.createValidReadTxnList(hiveResponse, currentTxn).toString();
+
+ assertThat(trinoValue)
+ .isEqualTo(hiveValue)
+ .isEqualTo("6:1:1,2,0:3");
+ }
+
+ @Test
+ public void testCreateValidTxnWriteIdList()
+ {
+ BitSet aborted = new BitSet();
+ aborted.set(2, 4);
+ ByteBuffer abortedBits = ByteBuffer.wrap(aborted.toByteArray());
+
+ long currentTxn = 7;
+ TableValidWriteIds table1 = new TableValidWriteIds("abc.xyz", 6, List.of(1L, 2L, 3L), abortedBits);
+ TableValidWriteIds table2 = new TableValidWriteIds("foo.bar", 5, List.of(3L, 4L), ByteBuffer.wrap(new byte[0]));
+ table2.setMinOpenWriteId(2);
+ var trinoIds = List.of(table1, table2);
+ String trinoValue = createValidTxnWriteIdList(currentTxn, trinoIds);
+
+ var hiveIds = trinoIds.stream()
+ .map(ids -> {
+ return toHiveTableValidWriteIds(ids);
+ })
+ .toList();
+ String hiveValue = org.apache.hadoop.hive.metastore.txn.TxnUtils.createValidTxnWriteIdList(currentTxn, hiveIds).toString();
+
+ // the expected result depends on HashMap iteration order (matches Hive behavior)
+ assertThat(trinoValue)
+ .isEqualTo(hiveValue)
+ .isEqualTo("7$foo.bar:5:2:3,4:$abc.xyz:6:9223372036854775807:1,2:3");
+ }
+
+ @Test
+ public void testCreateValidWriteIdList()
+ {
+ BitSet aborted = new BitSet();
+ aborted.set(2, 4);
+ ByteBuffer abortedBits = ByteBuffer.wrap(aborted.toByteArray());
+
+ var trinoIds = new TableValidWriteIds("abc.xyz", 6, List.of(1L, 2L, 3L), abortedBits);
+ String trinoValue = TxnUtils.createValidWriteIdList(trinoIds);
+
+ var hiveIds = toHiveTableValidWriteIds(trinoIds);
+ String hiveValue = org.apache.hadoop.hive.metastore.txn.TxnUtils.createValidReaderWriteIdList(hiveIds).toString();
+
+ assertThat(trinoValue)
+ .isEqualTo(hiveValue)
+ .isEqualTo("abc.xyz:6:9223372036854775807:1,2:3");
+ }
+
+ private static org.apache.hadoop.hive.metastore.api.TableValidWriteIds toHiveTableValidWriteIds(TableValidWriteIds ids)
+ {
+ var result = new org.apache.hadoop.hive.metastore.api.TableValidWriteIds(
+ ids.getFullTableName(),
+ ids.getWriteIdHighWaterMark(),
+ ids.getInvalidWriteIds(),
+ ids.bufferForAbortedBits());
+ if (ids.isSetMinOpenWriteId()) {
+ result.setMinOpenWriteId(ids.getMinOpenWriteId());
+ }
+ return result;
+ }
+}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestLoggingInvocationHandlerWithHiveMetastore.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestLoggingInvocationHandlerWithHiveMetastore.java
index 2e2882e592be..928096dc9865 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestLoggingInvocationHandlerWithHiveMetastore.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/util/TestLoggingInvocationHandlerWithHiveMetastore.java
@@ -13,9 +13,9 @@
*/
package io.trino.plugin.hive.util;
+import io.trino.hive.thrift.metastore.ThriftHiveMetastore;
import io.trino.plugin.base.util.LoggingInvocationHandler;
-import io.trino.plugin.base.util.LoggingInvocationHandler.AirliftParameterNamesProvider;
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
+import org.assertj.core.api.InstanceOfAssertFactories;
import org.testng.annotations.Test;
import java.util.ArrayList;
@@ -36,14 +36,15 @@ public void testWithThriftHiveMetastoreClient()
List messages = new ArrayList<>();
// LoggingInvocationHandler is used e.g. with ThriftHiveMetastore.Iface. Since the logging is reflection-based,
// we test it with this interface as well.
- ThriftHiveMetastore.Iface proxy = newProxy(ThriftHiveMetastore.Iface.class, new LoggingInvocationHandler(
- dummyThriftHiveMetastoreClient(),
- new AirliftParameterNamesProvider(ThriftHiveMetastore.Iface.class, ThriftHiveMetastore.Client.class),
- messages::add));
- proxy.get_table("some_database", "some_table_name");
+ ThriftHiveMetastore.Iface proxy = newProxy(
+ ThriftHiveMetastore.Iface.class,
+ new LoggingInvocationHandler(dummyThriftHiveMetastoreClient(), messages::add));
+ proxy.getTable("some_database", "some_table_name");
assertThat(messages)
.hasSize(1)
- .element(0).matches(message -> message.matches("\\QInvocation of get_table(dbname='some_database', tbl_name='some_table_name') succeeded in\\E " + DURATION_PATTERN));
+ .element(0)
+ .asInstanceOf(InstanceOfAssertFactories.STRING)
+ .matches("\\QInvocation of getTable(dbname='some_database', tbl_name='some_table_name') succeeded in\\E " + DURATION_PATTERN);
}
private static ThriftHiveMetastore.Iface dummyThriftHiveMetastoreClient()
diff --git a/plugin/trino-iceberg/pom.xml b/plugin/trino-iceberg/pom.xml
index bbf4d3de0d13..30b81865c50e 100644
--- a/plugin/trino-iceberg/pom.xml
+++ b/plugin/trino-iceberg/pom.xml
@@ -83,6 +83,11 @@
hive-apache
+
+ io.trino.hive
+ hive-thrift
+
+
io.airlift
bootstrap
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestIcebergHiveMetastoreTableOperationsReleaseLockFailure.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestIcebergHiveMetastoreTableOperationsReleaseLockFailure.java
index 111b8cd50a7f..ff254f33c4ce 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestIcebergHiveMetastoreTableOperationsReleaseLockFailure.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestIcebergHiveMetastoreTableOperationsReleaseLockFailure.java
@@ -15,6 +15,7 @@
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
+import io.trino.hive.thrift.metastore.Table;
import io.trino.plugin.hive.metastore.AcidTransactionOwner;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
@@ -103,7 +104,7 @@ public void releaseTableLock(long lockId)
}
@Override
- public synchronized void createTable(org.apache.hadoop.hive.metastore.api.Table table)
+ public synchronized void createTable(Table table)
{
// InMemoryThriftMetastore throws an exception if the table has any privileges set
table.setPrivileges(null);
diff --git a/pom.xml b/pom.xml
index 92efc51d0e33..7f6f2d49981a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -747,6 +747,12 @@
0.13.1-9
+
+ io.trino.hive
+ hive-thrift
+ 1
+
+
io.trino.orc
orc-protobuf
@@ -1740,21 +1746,7 @@
org.apache.thrift
libthrift
- 0.9.3-1
-
-
- org.apache.httpcomponents
- httpcore
-
-
- org.apache.httpcomponents
- httpclient
-
-
- javax.servlet
- servlet-api
-
-
+ 0.17.0
diff --git a/testing/trino-product-tests/pom.xml b/testing/trino-product-tests/pom.xml
index 3817deea3610..37d7c40f1f17 100644
--- a/testing/trino-product-tests/pom.xml
+++ b/testing/trino-product-tests/pom.xml
@@ -66,6 +66,11 @@
hive-apache
+
+ io.trino.hive
+ hive-thrift
+
+
io.trino.tempto
tempto-core