diff --git a/pom.xml b/pom.xml
index dc4d865..1a3564f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -10,7 +10,7 @@
io.prestosql.hivehive-apache
- 3.0.0-7-SNAPSHOT
+ 3.1.2-1-SNAPSHOThive-apacheShaded version of Apache Hive for Presto
@@ -43,13 +43,12 @@
io.prestosql.hive.\$internal
- 3.0.0
+ 3.1.21.8.2
- 3.5.21.11.02.5.0
- 1.7.25
+ 1.7.29
@@ -287,10 +286,6 @@
org.apache.ivyivy
-
- commons-httpclient
- commons-httpclient
- commons-iocommons-io
@@ -533,12 +528,6 @@
${dep.protobuf.version}
-
- org.jodd
- jodd-core
- ${dep.jodd.version}
-
-
org.apache.hadoop
@@ -677,10 +666,6 @@
org.tukaani.xz${shadeBase}.org.tukaani.xz
-
- jodd
- ${shadeBase}.jodd
- com.codahale.metrics${shadeBase}.com.codahale.metrics
@@ -725,6 +710,7 @@
hive-exec-log4j2.propertiestez-container-log4j2.propertiesorg/apache/hadoop/hive/ql/io/CodecPool*.class
+ org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.classorg/apache/tez/**
diff --git a/src/main/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/src/main/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
new file mode 100644
index 0000000..6ee2665
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
@@ -0,0 +1,30 @@
+package org.apache.hadoop.hive.ql.io.parquet.timestamp;
+
+import org.apache.hadoop.hive.common.type.Timestamp;
+
+import static java.lang.Math.floorDiv;
+import static java.lang.Math.floorMod;
+import static java.lang.Math.toIntExact;
+import static java.util.concurrent.TimeUnit.SECONDS;
+
+public final class NanoTimeUtils
+{
+    private NanoTimeUtils() {}
+
+    private static final int JULIAN_EPOCH_OFFSET_DAYS = 2_440_588;
+
+    private static final long SECONDS_PER_DAY = 86_400;
+
+    public static NanoTime getNanoTime(Timestamp timestamp, @SuppressWarnings("unused") boolean ignored)
+    {
+        // Use floorDiv/floorMod rather than truncating division and %: for
+        // timestamps before 1970-01-01 (negative epoch seconds), truncation
+        // would map to the wrong Julian day and produce a negative
+        // time-of-day, whereas floor semantics yield the previous day with a
+        // non-negative nanos-of-day.
+        long epochSeconds = timestamp.toEpochSecond();
+        int julianDay = JULIAN_EPOCH_OFFSET_DAYS + toIntExact(floorDiv(epochSeconds, SECONDS_PER_DAY));
+        long timeOfDayNanos = SECONDS.toNanos(floorMod(epochSeconds, SECONDS_PER_DAY)) + timestamp.getNanos();
+
+        return new NanoTime(julianDay, timeOfDayNanos);
+    }
+}
diff --git a/src/main/java/org/apache/hadoop/hive/serde2/SerDe.java b/src/main/java/org/apache/hadoop/hive/serde2/SerDe.java
deleted file mode 100644
index f45ebdb..0000000
--- a/src/main/java/org/apache/hadoop/hive/serde2/SerDe.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2;
-
-/**
- * Legacy interface from Hive 1.x needed for DWRF
- */
-@Deprecated
-public interface SerDe
- extends Deserializer, Serializer
-{}
diff --git a/src/main/java/org/apache/hadoop/hive/serde2/columnar/OptimizedLazyBinaryColumnarSerde.java b/src/main/java/org/apache/hadoop/hive/serde2/columnar/OptimizedLazyBinaryColumnarSerde.java
deleted file mode 100644
index d27fd53..0000000
--- a/src/main/java/org/apache/hadoop/hive/serde2/columnar/OptimizedLazyBinaryColumnarSerde.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2.columnar;
-
-import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
-import org.apache.hadoop.io.Writable;
-
-import java.util.List;
-
-/**
- * This is a variant of {@link LazyBinaryColumnarSerDe} that avoids a call to
- * {@code StringObjectInspector.getPrimitiveJavaObject()} in {@code serialize()}
- * to check whether the string is empty (it calls {@code getPrimitiveWritableObject()}
- * instead). This improves CPU efficiency by avoiding turning the underlying bytes into
- * a Java String.
- */
-public class OptimizedLazyBinaryColumnarSerde
- extends LazyBinaryColumnarSerDe
-{
- @Override
- public Writable serialize(Object obj, ObjectInspector objInspector)
- throws SerDeException
- {
- if (objInspector.getCategory() != ObjectInspector.Category.STRUCT) {
- throw new SerDeException(getClass().toString()
- + " can only serialize struct types, but we got: "
- + objInspector.getTypeName());
- }
-
- StructObjectInspector soi = (StructObjectInspector) objInspector;
- List extends StructField> fields = soi.getAllStructFieldRefs();
- List