diff --git a/.github/workflows/hive-ci.yml b/.github/workflows/hive-ci.yml
index d95ca1bd5c6a..371af2b31a2d 100644
--- a/.github/workflows/hive-ci.yml
+++ b/.github/workflows/hive-ci.yml
@@ -66,7 +66,7 @@ concurrency:
   cancel-in-progress: ${{ github.event_name == 'pull_request' }}
 
 jobs:
-  hive2-tests:
+  mr-tests:
     runs-on: ubuntu-22.04
     strategy:
       matrix:
@@ -87,7 +87,7 @@ jobs:
           key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
           restore-keys: ${{ runner.os }}-gradle-
       - run: echo -e "$(ip addr show eth0 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1)\t$(hostname -f) $(hostname -s)" | sudo tee -a /etc/hosts
-      - run: ./gradlew -DsparkVersions= -DhiveVersions=2 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check :iceberg-hive-runtime:check -x javadoc
+      - run: ./gradlew -DsparkVersions= -DhiveVersions=3 -DflinkVersions= -DkafkaVersions= -Pquick=true :iceberg-mr:check :iceberg-hive-runtime:check -x javadoc
       - uses: actions/upload-artifact@v4
         if: failure()
         with:
diff --git a/build.gradle b/build.gradle
index 81daf14a357f..9a605f230d5b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -675,7 +675,7 @@ project(':iceberg-hive-metastore') {
 
     compileOnly libs.avro.avro
 
-    compileOnly(libs.hive2.metastore) {
+    compileOnly(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -695,7 +695,7 @@ project(':iceberg-hive-metastore') {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
      exclude group: 'org.pentaho' // missing dependency
@@ -707,7 +707,7 @@ project(':iceberg-hive-metastore') {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }
 
-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -723,7 +723,9 @@ project(':iceberg-hive-metastore') {
       exclude group: 'com.zaxxer', module: 'HikariCP'
     }
 
-    compileOnly(libs.hadoop2.client) {
+    testImplementation(libs.hive3.standalone.metastore)
+
+    compileOnly(libs.hadoop3.client) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
     }
diff --git a/flink/v1.18/build.gradle b/flink/v1.18/build.gradle
index 83dc07523a3c..fbafe9d83624 100644
--- a/flink/v1.18/build.gradle
+++ b/flink/v1.18/build.gradle
@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }
 
-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -192,7 +192,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.apache.avro', module: 'avro'
     }
 
-    integrationImplementation(libs.hive2.metastore) {
+    integrationImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -209,7 +209,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.slf4j'
     }
 
-    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    integrationImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/flink/v1.19/build.gradle b/flink/v1.19/build.gradle
index 50bcadb618e4..7159fd493844 100644
--- a/flink/v1.19/build.gradle
+++ b/flink/v1.19/build.gradle
@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }
 
-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.apache.avro', module: 'avro'
     }
 
-    integrationImplementation(libs.hive2.metastore) {
+    integrationImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.slf4j'
     }
 
-    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    integrationImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/flink/v1.20/build.gradle b/flink/v1.20/build.gradle
index 4a1bae660bdb..8c0998e06098 100644
--- a/flink/v1.20/build.gradle
+++ b/flink/v1.20/build.gradle
@@ -88,7 +88,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
     // that's really old. We use the core classifier to be able to override our guava
     // version. Luckily, hive-exec seems to work okay so far with this version of guava
     // See: https://github.com/apache/hive/blob/master/ql/pom.xml#L911 for more context.
-    testImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    testImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -100,7 +100,7 @@ project(":iceberg-flink:iceberg-flink-${flinkMajorVersion}") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
     }
 
-    testImplementation(libs.hive2.metastore) {
+    testImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -193,7 +193,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.apache.avro', module: 'avro'
     }
 
-    integrationImplementation(libs.hive2.metastore) {
+    integrationImplementation(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
@@ -210,7 +210,7 @@ project(":iceberg-flink:iceberg-flink-runtime-${flinkMajorVersion}") {
       exclude group: 'org.slf4j'
     }
 
-    integrationImplementation("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    integrationImplementation("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/gradle.properties b/gradle.properties
index dc1e1a509b01..85660a089221 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -18,8 +18,8 @@ jmhJsonOutputPath=build/reports/jmh/results.json
 jmhIncludeRegex=.*
 systemProp.defaultFlinkVersions=1.20
 systemProp.knownFlinkVersions=1.18,1.19,1.20
-systemProp.defaultHiveVersions=2
-systemProp.knownHiveVersions=2,3
+systemProp.defaultHiveVersions=3
+systemProp.knownHiveVersions=3
 systemProp.defaultSparkVersions=3.5
 systemProp.knownSparkVersions=3.3,3.4,3.5
 systemProp.defaultKafkaVersions=3
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 96f62fb8163b..38a6fc6b5921 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -49,7 +49,6 @@ guava = "33.3.1-jre"
 hadoop2 = "2.7.3"
 hadoop3 = "3.4.1"
 httpcomponents-httpclient5 = "5.4.1"
-hive2 = { strictly = "2.3.9"} # see rich version usage explanation above
 hive3 = "3.1.3"
 immutables-value = "2.10.1"
 jackson-bom = "2.18.1"
@@ -135,12 +134,9 @@ hadoop2-mapreduce-client-core = { module = "org.apache.hadoop:hadoop-mapreduce-c
 hadoop2-minicluster = { module = "org.apache.hadoop:hadoop-minicluster", version.ref = "hadoop2" }
 hadoop3-client = { module = "org.apache.hadoop:hadoop-client", version.ref = "hadoop3" }
 hadoop3-common = { module = "org.apache.hadoop:hadoop-common", version.ref = "hadoop3" }
-hive2-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive2" }
-hive2-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive2" }
-hive2-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive2" }
-hive2-service = { module = "org.apache.hive:hive-service", version.ref = "hive2" }
 hive3-exec = { module = "org.apache.hive:hive-exec", version.ref = "hive3" }
 hive3-metastore = { module = "org.apache.hive:hive-metastore", version.ref = "hive3" }
+hive3-standalone-metastore = { module = "org.apache.hive:hive-standalone-metastore", version.ref = "hive3" }
 hive3-serde = { module = "org.apache.hive:hive-serde", version.ref = "hive3" }
 hive3-service = { module = "org.apache.hive:hive-service", version.ref = "hive3" }
 httpcomponents-httpclient5 = { module = "org.apache.httpcomponents.client5:httpclient5", version.ref = "httpcomponents-httpclient5" }
diff --git a/hive-metastore/src/test/java/org/apache/iceberg/hive/HiveTableTest.java b/hive-metastore/src/test/java/org/apache/iceberg/hive/HiveTableTest.java
index 13c459128dec..40bbf6ec7555 100644
--- a/hive-metastore/src/test/java/org/apache/iceberg/hive/HiveTableTest.java
+++ b/hive-metastore/src/test/java/org/apache/iceberg/hive/HiveTableTest.java
@@ -509,7 +509,7 @@ public void testRegisterHadoopTableToHiveCatalog() throws IOException, TExceptio
 
     assertThatThrownBy(() -> HIVE_METASTORE_EXTENSION.metastoreClient().getTable(DB_NAME, "table1"))
         .isInstanceOf(NoSuchObjectException.class)
-        .hasMessage("hivedb.table1 table not found");
+        .hasMessage("hive.hivedb.table1 table not found");
     assertThatThrownBy(() -> catalog.loadTable(identifier))
         .isInstanceOf(NoSuchTableException.class)
         .hasMessage("Table does not exist: hivedb.table1");
diff --git a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java
index 2fe1bacf9dd1..d57c1bd63c6f 100644
--- a/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java
+++ b/hive-metastore/src/test/java/org/apache/iceberg/hive/TestHiveClientPool.java
@@ -30,12 +30,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.GetAllFunctionsResponse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.utils.JavaUtils;
 import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 import org.apache.thrift.transport.TTransportException;
 import org.junit.jupiter.api.AfterEach;
@@ -121,36 +121,36 @@ public void testGetTablesFailsForNonReconnectableException() throws Exception {
 
   @Test
   public void testExceptionMessages() {
-    try (MockedStatic<MetaStoreUtils> mockedStatic = Mockito.mockStatic(MetaStoreUtils.class)) {
+    try (MockedStatic<JavaUtils> mockedStatic = Mockito.mockStatic(JavaUtils.class)) {
       mockedStatic
-          .when(() -> MetaStoreUtils.newInstance(any(), any(), any()))
+          .when(() -> JavaUtils.newInstance(any(), any(), any()))
          .thenThrow(new RuntimeException(new MetaException("Another meta exception")));
       assertThatThrownBy(() -> clients.run(client -> client.getTables("default", "t")))
           .isInstanceOf(RuntimeMetaException.class)
           .hasMessage("Failed to connect to Hive Metastore");
     }
 
-    try (MockedStatic<MetaStoreUtils> mockedStatic = Mockito.mockStatic(MetaStoreUtils.class)) {
+    try (MockedStatic<JavaUtils> mockedStatic = Mockito.mockStatic(JavaUtils.class)) {
       mockedStatic
-          .when(() -> MetaStoreUtils.newInstance(any(), any(), any()))
+          .when(() -> JavaUtils.newInstance(any(), any(), any()))
           .thenThrow(new RuntimeException(new MetaException()));
       assertThatThrownBy(() -> clients.run(client -> client.getTables("default", "t")))
           .isInstanceOf(RuntimeMetaException.class)
           .hasMessage("Failed to connect to Hive Metastore");
     }
 
-    try (MockedStatic<MetaStoreUtils> mockedStatic = Mockito.mockStatic(MetaStoreUtils.class)) {
+    try (MockedStatic<JavaUtils> mockedStatic = Mockito.mockStatic(JavaUtils.class)) {
       mockedStatic
-          .when(() -> MetaStoreUtils.newInstance(any(), any(), any()))
+          .when(() -> JavaUtils.newInstance(any(), any(), any()))
           .thenThrow(new RuntimeException());
       assertThatThrownBy(() -> clients.run(client -> client.getTables("default", "t")))
           .isInstanceOf(RuntimeMetaException.class)
           .hasMessage("Failed to connect to Hive Metastore");
     }
 
-    try (MockedStatic<MetaStoreUtils> mockedStatic = Mockito.mockStatic(MetaStoreUtils.class)) {
+    try (MockedStatic<JavaUtils> mockedStatic = Mockito.mockStatic(JavaUtils.class)) {
       mockedStatic
-          .when(() -> MetaStoreUtils.newInstance(any(), any(), any()))
+          .when(() -> JavaUtils.newInstance(any(), any(), any()))
           .thenThrow(new RuntimeException("Another instance of Derby may have already booted"));
       assertThatThrownBy(() -> clients.run(client -> client.getTables("default", "t")))
           .isInstanceOf(RuntimeMetaException.class)
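Reviewer note on the TestHiveClientPool hunk above: the statically mocked class moves from `MetaStoreUtils` to `JavaUtils` because client construction in Hive 3's metastore goes through `JavaUtils.newInstance`. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of `Mockito.mockStatic` (JUnit 5 + Mockito 3.4+); the `Reflection` class is a made-up stand-in, not a Hive or Iceberg API:

```java
import static org.mockito.ArgumentMatchers.any;

import org.junit.jupiter.api.Test;
import org.mockito.MockedStatic;
import org.mockito.Mockito;

// Hypothetical stand-in for a static factory like JavaUtils.newInstance.
class Reflection {
  static Object newInstance(Class<?> clazz, Class<?>[] types, Object[] args) {
    try {
      return clazz.getConstructor(types).newInstance(args);
    } catch (ReflectiveOperationException e) {
      throw new RuntimeException(e);
    }
  }
}

public class StaticMockSketch {
  @Test
  void newInstanceFailureIsSurfaced() {
    // The mock is only active inside this try-with-resources scope, which is why
    // the Iceberg test opens a fresh MockedStatic block per scenario.
    try (MockedStatic<Reflection> mocked = Mockito.mockStatic(Reflection.class)) {
      mocked
          .when(() -> Reflection.newInstance(any(), any(), any()))
          .thenThrow(new RuntimeException("boom"));
      // Any code path that calls Reflection.newInstance now sees the stubbed failure.
    }
  }
}
```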
diff --git a/kafka-connect/build.gradle b/kafka-connect/build.gradle
index 15bf013f28b2..2067e1eb0084 100644
--- a/kafka-connect/build.gradle
+++ b/kafka-connect/build.gradle
@@ -134,7 +134,7 @@ project(':iceberg-kafka-connect:iceberg-kafka-connect-runtime') {
     implementation 'com.azure:azure-identity'
 
     hive project(':iceberg-hive-metastore')
-    hive(libs.hive2.metastore) {
+    hive(libs.hive3.metastore) {
       exclude group: 'org.apache.avro', module: 'avro'
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
       exclude group: 'org.pentaho' // missing dependency
diff --git a/mr/build.gradle b/mr/build.gradle
index bf8f9ee943f7..c70a1dd30dc2 100644
--- a/mr/build.gradle
+++ b/mr/build.gradle
@@ -41,7 +41,7 @@ project(':iceberg-mr') {
       exclude group: 'org.apache.avro', module: 'avro'
     }
 
-    compileOnly("${libs.hive2.exec.get().module}:${libs.hive2.exec.get().getVersion()}:core") {
+    compileOnly("${libs.hive3.exec.get().module}:${libs.hive3.exec.get().getVersion()}:core") {
       exclude group: 'com.google.code.findbugs', module: 'jsr305'
       exclude group: 'com.google.guava'
       exclude group: 'com.google.protobuf', module: 'protobuf-java'
@@ -52,8 +52,8 @@ project(':iceberg-mr') {
       exclude group: 'org.pentaho' // missing dependency
       exclude group: 'org.slf4j', module: 'slf4j-log4j12'
     }
-    compileOnly libs.hive2.metastore
-    compileOnly libs.hive2.serde
+    compileOnly libs.hive3.metastore
+    compileOnly libs.hive3.serde
 
     implementation libs.caffeine
@@ -70,7 +70,7 @@ project(':iceberg-mr') {
     testImplementation libs.kryo.shaded
     testImplementation platform(libs.jackson.bom)
     testImplementation libs.jackson.annotations
-    testImplementation(libs.hive2.service) {
+    testImplementation(libs.hive3.service) {
       exclude group: 'org.apache.hive', module: 'hive-exec'
     }
     testImplementation libs.tez08.dag
diff --git a/mr/src/main/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSupport.java b/mr/src/main/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSupport.java
deleted file mode 100644
index b6dd984a5843..000000000000
--- a/mr/src/main/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSupport.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hive.ql.exec.vector;
-
-import java.util.Locale;
-import java.util.Map;
-import org.apache.iceberg.relocated.com.google.common.collect.Maps;
-
-/** Copied here from Hive for compatibility */
-@SuppressWarnings("VisibilityModifier")
-public class VectorizedSupport {
-  public enum Support {
-    DECIMAL_64;
-
-    final String lowerCaseName;
-
-    Support() {
-      this.lowerCaseName = name().toLowerCase(Locale.ROOT);
-    }
-
-    @SuppressWarnings("checkstyle:ConstantName")
-    public static final Map<String, Support> nameToSupportMap = Maps.newHashMap();
-
-    static {
-      for (Support support : values()) {
-        nameToSupportMap.put(support.lowerCaseName, support);
-      }
-    }
-  }
-}
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergInputFormat.java b/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergInputFormat.java
index 5f2eb9834b63..fc3157b02ff6 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergInputFormat.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergInputFormat.java
@@ -40,7 +40,6 @@
 import org.apache.iceberg.common.DynConstructors;
 import org.apache.iceberg.data.Record;
 import org.apache.iceberg.expressions.Expression;
-import org.apache.iceberg.hive.HiveVersion;
 import org.apache.iceberg.mr.InputFormatConfig;
 import org.apache.iceberg.mr.mapred.AbstractMapredIcebergRecordReader;
 import org.apache.iceberg.mr.mapred.Container;
@@ -48,7 +47,6 @@
 import org.apache.iceberg.mr.mapreduce.IcebergInputFormat;
 import org.apache.iceberg.mr.mapreduce.IcebergSplit;
 import org.apache.iceberg.mr.mapreduce.IcebergSplitContainer;
-import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.util.SerializationUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,12 +57,12 @@ public class HiveIcebergInputFormat extends MapredIcebergInputFormat<Record>
   private static final Logger LOG = LoggerFactory.getLogger(HiveIcebergInputFormat.class);
   private static final String HIVE_VECTORIZED_RECORDREADER_CLASS =
       "org.apache.iceberg.mr.hive.vector.HiveIcebergVectorizedRecordReader";
-  private static final DynConstructors.Ctor<AbstractMapredIcebergRecordReader>
-      HIVE_VECTORIZED_RECORDREADER_CTOR;
+  private static DynConstructors.Ctor<AbstractMapredIcebergRecordReader>
+      hiveVectorizedRecordReaderCtor;
 
   static {
-    if (HiveVersion.min(HiveVersion.HIVE_3)) {
-      HIVE_VECTORIZED_RECORDREADER_CTOR =
+    try {
+      hiveVectorizedRecordReaderCtor =
           DynConstructors.builder(AbstractMapredIcebergRecordReader.class)
               .impl(
                   HIVE_VECTORIZED_RECORDREADER_CLASS,
@@ -73,8 +71,8 @@ public class HiveIcebergInputFormat extends MapredIcebergInputFormat<Record>
                   JobConf.class,
                   Reporter.class)
              .build();
-    } else {
-      HIVE_VECTORIZED_RECORDREADER_CTOR = null;
+    } catch (Exception e) {
+      hiveVectorizedRecordReaderCtor = null;
     }
   }
 
@@ -113,8 +111,6 @@ public RecordReader<Void, Container<Record>> getRecordReader(
     if (HiveConf.getBoolVar(job, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED)
         && Utilities.getVectorizedRowBatchCtx(job) != null) {
-      Preconditions.checkArgument(
-          HiveVersion.min(HiveVersion.HIVE_3), "Vectorization only supported for Hive 3+");
       job.setEnum(InputFormatConfig.IN_MEMORY_DATA_MODEL, InputFormatConfig.InMemoryDataModel.HIVE);
       job.setBoolean(InputFormatConfig.SKIP_RESIDUAL_FILTERING, true);
@@ -122,7 +118,7 @@ public RecordReader<Void, Container<Record>> getRecordReader(
       IcebergSplit icebergSplit = ((IcebergSplitContainer) split).icebergSplit();
       // bogus cast for favouring code reuse over syntax
       return (RecordReader)
-          HIVE_VECTORIZED_RECORDREADER_CTOR.newInstance(
+          hiveVectorizedRecordReaderCtor.newInstance(
               new IcebergInputFormat<>(), icebergSplit, job, reporter);
     } else {
       return super.getRecordReader(split, job, reporter);
@@ -134,9 +130,7 @@ public boolean shouldSkipCombine(Path path, Configuration conf) {
     return true;
   }
 
-  // Override annotation commented out, since this interface method has been introduced only in Hive
-  // 3
-  // @Override
+  @Override
   public VectorizedSupport.Support[] getSupportedFeatures() {
     return new VectorizedSupport.Support[0];
   }
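The static initializer above trades the explicit `HiveVersion` gate for a try/catch around a reflective constructor lookup. A rough sketch of that load-optionally-else-null idiom in plain `java.lang.reflect` terms (the class name is hypothetical; Iceberg's actual `DynConstructors` builder layers caching and typed invocation on top of this):

```java
import java.lang.reflect.Constructor;

/**
 * Sketch of the fallback pattern: resolve an optional class reflectively at
 * class-load time and keep a null handle when it is absent. Illustrative only.
 */
public class OptionalCtorSketch {
  private static final String IMPL_CLASS = "com.example.FastReader"; // hypothetical name

  private static Constructor<?> fastReaderCtor;

  static {
    try {
      fastReaderCtor = Class.forName(IMPL_CLASS).getConstructor();
    } catch (Exception e) {
      // The class (or a matching constructor) is not on the classpath. Callers
      // must null-check before invoking or they risk an NPE -- the same caveat
      // applies to hiveVectorizedRecordReaderCtor now that the Preconditions
      // guard in getRecordReader() is gone.
      fastReaderCtor = null;
    }
  }

  static Object newFastReaderOrNull() throws ReflectiveOperationException {
    return fastReaderCtor == null ? null : fastReaderCtor.newInstance();
  }
}
```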
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java b/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
index da40f4c73ef3..0f4d1f9b5c7c 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
@@ -106,9 +106,7 @@ public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> map) {}
 
-  // Override annotation commented out, since this interface method has been introduced only in Hive
-  // 3
-  // @Override
+  @Override
   public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> secrets) {}
 
   @Override
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspector.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspector.java
deleted file mode 100644
index 17a82f430208..000000000000
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspector.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iceberg.mr.hive.serde.objectinspector;
-
-import java.sql.Date;
-import java.time.LocalDate;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.apache.iceberg.util.DateTimeUtil;
-
-public final class IcebergDateObjectInspector extends AbstractPrimitiveJavaObjectInspector
-    implements DateObjectInspector, WriteObjectInspector {
-
-  private static final IcebergDateObjectInspector INSTANCE = new IcebergDateObjectInspector();
-
-  public static IcebergDateObjectInspector get() {
-    return INSTANCE;
-  }
-
-  private IcebergDateObjectInspector() {
-    super(TypeInfoFactory.dateTypeInfo);
-  }
-
-  @Override
-  public Date getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Date.valueOf((LocalDate) o);
-  }
-
-  @Override
-  public DateWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new DateWritable(DateTimeUtil.daysFromDate((LocalDate) o));
-  }
-
-  @Override
-  public Object copyObject(Object o) {
-    if (o == null) {
-      return null;
-    }
-
-    if (o instanceof Date) {
-      return new Date(((Date) o).getTime());
-    } else if (o instanceof LocalDate) {
-      return LocalDate.of(
-          ((LocalDate) o).getYear(), ((LocalDate) o).getMonth(), ((LocalDate) o).getDayOfMonth());
-    } else {
-      return o;
-    }
-  }
-
-  @Override
-  public LocalDate convert(Object o) {
-    return o == null ? null : ((Date) o).toLocalDate();
-  }
-}
diff --git a/hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspectorHive3.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspectorHive3.java
similarity index 100%
rename from hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspectorHive3.java
rename to mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergDateObjectInspectorHive3.java
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergObjectInspector.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergObjectInspector.java
index 8be9a586d553..d0c9d9b75b79 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergObjectInspector.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergObjectInspector.java
@@ -27,39 +27,31 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.iceberg.Schema;
 import org.apache.iceberg.common.DynMethods;
-import org.apache.iceberg.hive.HiveVersion;
 import org.apache.iceberg.types.Type;
 import org.apache.iceberg.types.TypeUtil;
 import org.apache.iceberg.types.Types;
 
 public final class IcebergObjectInspector extends TypeUtil.SchemaVisitor<ObjectInspector> {
 
-  // get the correct inspectors depending on whether we're working with Hive2 or Hive3 dependencies
-  // we need to do this because there is a breaking API change in Date/TimestampObjectInspector
-  // between Hive2 and Hive3
-  private static final String DATE_INSPECTOR_CLASS =
-      HiveVersion.min(HiveVersion.HIVE_3)
-          ? "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspectorHive3"
-          : "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspector";
-
   public static final ObjectInspector DATE_INSPECTOR =
-      DynMethods.builder("get").impl(DATE_INSPECTOR_CLASS).buildStatic().invoke();
-
-  private static final String TIMESTAMP_INSPECTOR_CLASS =
-      HiveVersion.min(HiveVersion.HIVE_3)
-          ? "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampObjectInspectorHive3"
-          : "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampObjectInspector";
-
-  private static final String TIMESTAMPTZ_INSPECTOR_CLASS =
-      HiveVersion.min(HiveVersion.HIVE_3)
-          ? "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampWithZoneObjectInspectorHive3"
-          : "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampWithZoneObjectInspector";
+      DynMethods.builder("get")
+          .impl("org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspectorHive3")
+          .buildStatic()
+          .invoke();
 
   public static final ObjectInspector TIMESTAMP_INSPECTOR =
-      DynMethods.builder("get").impl(TIMESTAMP_INSPECTOR_CLASS).buildStatic().invoke();
+      DynMethods.builder("get")
+          .impl(
+              "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampObjectInspectorHive3")
+          .buildStatic()
+          .invoke();
 
   public static final ObjectInspector TIMESTAMP_INSPECTOR_WITH_TZ =
-      DynMethods.builder("get").impl(TIMESTAMPTZ_INSPECTOR_CLASS).buildStatic().invoke();
+      DynMethods.builder("get")
+          .impl(
+              "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergTimestampWithZoneObjectInspectorHive3")
+          .buildStatic()
+          .invoke();
 
   public static ObjectInspector create(@Nullable Schema schema) {
     if (schema == null) {
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspector.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspector.java
deleted file mode 100644
index 08c74c9afa4a..000000000000
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspector.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iceberg.mr.hive.serde.objectinspector;
-
-import java.sql.Timestamp;
-import java.time.LocalDateTime;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-public class IcebergTimestampObjectInspector extends AbstractPrimitiveJavaObjectInspector
-    implements TimestampObjectInspector, WriteObjectInspector {
-
-  private static final IcebergTimestampObjectInspector INSTANCE =
-      new IcebergTimestampObjectInspector();
-
-  public static IcebergTimestampObjectInspector get() {
-    return INSTANCE;
-  }
-
-  private IcebergTimestampObjectInspector() {
-    super(TypeInfoFactory.timestampTypeInfo);
-  }
-
-  @Override
-  public LocalDateTime convert(Object o) {
-    return o == null ? null : ((Timestamp) o).toLocalDateTime();
-  }
-
-  @Override
-  public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Timestamp.valueOf((LocalDateTime) o);
-  }
-
-  @Override
-  public TimestampWritable getPrimitiveWritableObject(Object o) {
-    Timestamp ts = getPrimitiveJavaObject(o);
-    return ts == null ? null : new TimestampWritable(ts);
-  }
-
-  @Override
-  public Object copyObject(Object o) {
-    if (o instanceof Timestamp) {
-      Timestamp ts = (Timestamp) o;
-      Timestamp copy = new Timestamp(ts.getTime());
-      copy.setNanos(ts.getNanos());
-      return copy;
-    } else if (o instanceof LocalDateTime) {
-      LocalDateTime ldt = (LocalDateTime) o;
-      return LocalDateTime.of(ldt.toLocalDate(), ldt.toLocalTime());
-    } else {
-      return o;
-    }
-  }
-}
diff --git a/hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspectorHive3.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspectorHive3.java
similarity index 100%
rename from hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspectorHive3.java
rename to mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampObjectInspectorHive3.java
diff --git a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspector.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspector.java
deleted file mode 100644
index f315b0b6d8ea..000000000000
--- a/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspector.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iceberg.mr.hive.serde.objectinspector;
-
-import java.sql.Timestamp;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-public class IcebergTimestampWithZoneObjectInspector extends AbstractPrimitiveJavaObjectInspector
-    implements TimestampObjectInspector, WriteObjectInspector {
-
-  private static final IcebergTimestampWithZoneObjectInspector INSTANCE =
-      new IcebergTimestampWithZoneObjectInspector();
-
-  public static IcebergTimestampWithZoneObjectInspector get() {
-    return INSTANCE;
-  }
-
-  private IcebergTimestampWithZoneObjectInspector() {
-    super(TypeInfoFactory.timestampTypeInfo);
-  }
-
-  @Override
-  public OffsetDateTime convert(Object o) {
-    return o == null ? null : OffsetDateTime.ofInstant(((Timestamp) o).toInstant(), ZoneOffset.UTC);
-  }
-
-  @Override
-  public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Timestamp.from(((OffsetDateTime) o).toInstant());
-  }
-
-  @Override
-  public TimestampWritable getPrimitiveWritableObject(Object o) {
-    Timestamp ts = getPrimitiveJavaObject(o);
-    return ts == null ? null : new TimestampWritable(ts);
-  }
-
-  @Override
-  public Object copyObject(Object o) {
-    if (o instanceof Timestamp) {
-      Timestamp ts = (Timestamp) o;
-      Timestamp copy = new Timestamp(ts.getTime());
-      copy.setNanos(ts.getNanos());
-      return copy;
-    } else if (o instanceof OffsetDateTime) {
-      OffsetDateTime odt = (OffsetDateTime) o;
-      return OffsetDateTime.ofInstant(odt.toInstant(), odt.getOffset());
-    } else {
-      return o;
-    }
-  }
-}
diff --git a/hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspectorHive3.java b/mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspectorHive3.java
similarity index 100%
rename from hive3/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspectorHive3.java
rename to mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/IcebergTimestampWithZoneObjectInspectorHive3.java
diff --git a/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java
index 9b8d4e9247a2..c8e714abbf13 100644
--- a/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java
+++ b/mr/src/main/java/org/apache/iceberg/mr/mapreduce/IcebergInputFormat.java
@@ -63,7 +63,6 @@
 import org.apache.iceberg.expressions.Evaluator;
 import org.apache.iceberg.expressions.Expression;
 import org.apache.iceberg.expressions.Expressions;
-import org.apache.iceberg.hive.HiveVersion;
 import org.apache.iceberg.io.CloseableIterable;
 import org.apache.iceberg.io.CloseableIterator;
 import org.apache.iceberg.io.FileIO;
@@ -167,7 +166,7 @@ private List<InputSplit> planInputSplits(
     tasksIterable.forEach(
         task -> {
           if (applyResidual && (model == InputFormatConfig.InMemoryDataModel.HIVE)) {
-            // TODO: We do not support residual evaluation for HIVE and PIG in memory data model
+            // TODO: We do not support residual evaluation for HIVE in memory data model
             // yet
             checkResiduals(task);
           }
@@ -213,11 +212,11 @@ private static final class IcebergRecordReader<T> extends RecordReader<Void, T>
 
     private static final String HIVE_VECTORIZED_READER_CLASS =
         "org.apache.iceberg.mr.hive.vector.HiveVectorizedReader";
-    private static final DynMethods.StaticMethod HIVE_VECTORIZED_READER_BUILDER;
+    private static DynMethods.StaticMethod hiveVectorizedReaderBuilder;
 
     static {
-      if (HiveVersion.min(HiveVersion.HIVE_3)) {
-        HIVE_VECTORIZED_READER_BUILDER =
+      try {
+        hiveVectorizedReaderBuilder =
             DynMethods.builder("reader")
                 .impl(
                     HIVE_VECTORIZED_READER_CLASS,
@@ -226,8 +225,8 @@ private static final class IcebergRecordReader<T> extends RecordReader<Void, T>
                     Map.class,
                     TaskAttemptContext.class)
                 .buildStatic();
-      } else {
-        HIVE_VECTORIZED_READER_BUILDER = null;
+      } catch (Exception e) {
+        hiveVectorizedReaderBuilder = null;
      }
    }
 
@@ -386,9 +385,8 @@ private CloseableIterable<T> newAvroIterable(
 
       switch (inMemoryDataModel) {
         case HIVE:
-          // TODO implement value readers for Pig and Hive
-          throw new UnsupportedOperationException(
-              "Avro support not yet supported for Pig and Hive");
+          // TODO implement value readers for Hive
+          throw new UnsupportedOperationException("Avro support not yet supported for Hive");
         case GENERIC:
           avroReadBuilder.createReaderFunc(
               (expIcebergSchema, expAvroSchema) ->
@@ -402,19 +400,11 @@ private CloseableIterable<T> newAvroIterable(
 
     private CloseableIterable<T> newParquetIterable(
         InputFile inputFile, FileScanTask task, Schema readSchema) {
-      Map<Integer, ?> idToConstant =
-          constantsMap(task, IdentityPartitionConverters::convertConstant);
       CloseableIterable<T> parquetIterator = null;
 
       switch (inMemoryDataModel) {
         case HIVE:
-          if (HiveVersion.min(HiveVersion.HIVE_3)) {
-            parquetIterator =
-                HIVE_VECTORIZED_READER_BUILDER.invoke(inputFile, task, idToConstant, context);
-          } else {
-            throw new UnsupportedOperationException(
-                "Vectorized read is unsupported for Hive 2 integration.");
-          }
+          parquetIterator = hiveVectorizedReaderBuilder.invoke(inputFile, task, null, context);
           break;
         case GENERIC:
           Parquet.ReadBuilder parquetReadBuilder =
@@ -452,13 +442,7 @@ private CloseableIterable<T> newOrcIterable(
       // ORC does not support reuse containers yet
       switch (inMemoryDataModel) {
         case HIVE:
-          if (HiveVersion.min(HiveVersion.HIVE_3)) {
-            orcIterator =
-                HIVE_VECTORIZED_READER_BUILDER.invoke(inputFile, task, idToConstant, context);
-          } else {
-            throw new UnsupportedOperationException(
-                "Vectorized read is unsupported for Hive 2 integration.");
-          }
+          orcIterator = hiveVectorizedReaderBuilder.invoke(inputFile, task, null, context);
           break;
         case GENERIC:
           ORC.ReadBuilder orcReadBuilder =
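For context on the `checkResiduals` call retained above: a residual is the part of a scan filter that partition pruning alone cannot prove, and it is exactly what the HIVE in-memory model still rejects. A sketch using Iceberg's public expression API (schema, field names, and values invented for illustration):

```java
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.expressions.Expression;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.expressions.ResidualEvaluator;
import org.apache.iceberg.types.Types;

public class ResidualSketch {
  public static void main(String[] args) {
    Schema schema =
        new Schema(
            Types.NestedField.required(1, "id", Types.LongType.get()),
            Types.NestedField.required(2, "ts_day", Types.StringType.get()));
    // Partition by ts_day only; a predicate on id can never be satisfied by pruning alone.
    PartitionSpec spec = PartitionSpec.builderFor(schema).identity("ts_day").build();

    Expression filter =
        Expressions.and(Expressions.equal("ts_day", "2024-01-01"), Expressions.equal("id", 0L));
    ResidualEvaluator residuals = ResidualEvaluator.of(spec, filter, true /* caseSensitive */);

    // One-column partition tuple holding ts_day = "2024-01-01".
    StructLike partition =
        new StructLike() {
          @Override
          public int size() {
            return 1;
          }

          @Override
          public <T> T get(int pos, Class<T> javaClass) {
            return javaClass.cast("2024-01-01");
          }

          @Override
          public <T> void set(int pos, T value) {
            throw new UnsupportedOperationException();
          }
        };

    // Prints a residual on id (not alwaysTrue), matching the message asserted
    // in testFailedResidualFiltering below.
    System.out.println(residuals.residualFor(partition));
  }
}
```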
Additional rows can be returned not satisfied by the filter expression"); } @TestTemplate diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestDeserializer.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestDeserializer.java index 8f58a36d6265..8eb80e8c56ba 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestDeserializer.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestDeserializer.java @@ -20,7 +20,6 @@ import static org.apache.iceberg.types.Types.NestedField.optional; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assumptions.assumeThat; import java.util.Arrays; import java.util.Collections; @@ -34,7 +33,6 @@ import org.apache.iceberg.Schema; import org.apache.iceberg.data.GenericRecord; import org.apache.iceberg.data.Record; -import org.apache.iceberg.hive.HiveVersion; import org.apache.iceberg.mr.hive.serde.objectinspector.IcebergObjectInspector; import org.apache.iceberg.types.Types; import org.junit.jupiter.api.Test; @@ -158,27 +156,6 @@ public void testListDeserialize() { assertThat(actual).isEqualTo(expected); } - @Test - public void testDeserializeEverySupportedType() { - assumeThat(HiveVersion.min(HiveVersion.HIVE_3)) - .as("No test yet for Hive3 (Date/Timestamp creation)") - .isFalse(); - - Deserializer deserializer = - new Deserializer.Builder() - .schema(HiveIcebergTestUtils.FULL_SCHEMA) - .writerInspector( - (StructObjectInspector) - IcebergObjectInspector.create(HiveIcebergTestUtils.FULL_SCHEMA)) - .sourceInspector(HiveIcebergTestUtils.FULL_SCHEMA_OBJECT_INSPECTOR) - .build(); - - Record expected = HiveIcebergTestUtils.getTestRecord(); - Record actual = deserializer.deserialize(HiveIcebergTestUtils.valuesForTestRecord(expected)); - - HiveIcebergTestUtils.assertEquals(expected, actual); - } - @Test public void testNullDeserialize() { Deserializer deserializer = diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerLocalScan.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerLocalScan.java index 9018d4518cbf..2f134be1b9eb 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerLocalScan.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerLocalScan.java @@ -52,10 +52,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) public class TestHiveIcebergStorageHandlerLocalScan { diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java index 6a297e4913e4..79e6f599271f 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerNoScan.java @@ -55,7 +55,6 @@ import org.apache.iceberg.exceptions.NoSuchTableException; import org.apache.iceberg.hadoop.Util; import org.apache.iceberg.hive.HiveSchemaUtil; -import org.apache.iceberg.hive.HiveVersion; import org.apache.iceberg.mr.Catalogs; import org.apache.iceberg.mr.InputFormatConfig; import 
org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; @@ -70,10 +69,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) public class TestHiveIcebergStorageHandlerNoScan { private static final PartitionSpec SPEC = PartitionSpec.unpartitioned(); @@ -770,9 +771,8 @@ public void testIcebergAndHmsTableProperties() throws Exception { if (Catalogs.hiveCatalog(shell.getHiveConf(), tableProperties)) { expectedIcebergProperties.put(TableProperties.ENGINE_HIVE_ENABLED, "true"); } - if (HiveVersion.min(HiveVersion.HIVE_3)) { - expectedIcebergProperties.put("bucketing_version", "2"); - } + + expectedIcebergProperties.put("bucketing_version", "2"); assertThat(icebergTable.properties()).isEqualTo((expectedIcebergProperties)); if (Catalogs.hiveCatalog(shell.getHiveConf(), tableProperties)) { @@ -868,7 +868,7 @@ public void testIcebergHMSPropertiesTranslation() throws Exception { TableIdentifier identifier = TableIdentifier.of("default", "customers"); - // Create HMS table with with a property to be translated + // Create HMS table with a property to be translated shell.executeStatement( String.format( "CREATE EXTERNAL TABLE default.customers " diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerTimezone.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerTimezone.java index b8a454d01f02..9447ad865790 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerTimezone.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerTimezone.java @@ -46,10 +46,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) public class TestHiveIcebergStorageHandlerTimezone { private static final Optional> DATE_FORMAT = diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithEngine.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithEngine.java index ce3a6fd92441..dc1b3cc319b3 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithEngine.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithEngine.java @@ -48,7 +48,6 @@ import org.apache.iceberg.data.Record; import org.apache.iceberg.hadoop.ConfigProperties; import org.apache.iceberg.hive.HiveSchemaUtil; -import org.apache.iceberg.hive.HiveVersion; import org.apache.iceberg.mr.InputFormatConfig; import org.apache.iceberg.mr.TestHelper; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; @@ -60,11 +59,13 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.api.extension.ExtendWith; 
import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) @Timeout(value = 200_000, unit = TimeUnit.MILLISECONDS) public class TestHiveIcebergStorageHandlerWithEngine { @@ -129,8 +130,7 @@ public static Collection parameters() { new Object[] {fileFormat, engine, TestTables.TestTableType.HIVE_CATALOG, false}); // test for vectorization=ON in case of ORC format and Tez engine if ((fileFormat == FileFormat.PARQUET || fileFormat == FileFormat.ORC) - && "tez".equals(engine) - && HiveVersion.min(HiveVersion.HIVE_3)) { + && "tez".equals(engine)) { testParams.add( new Object[] {fileFormat, engine, TestTables.TestTableType.HIVE_CATALOG, true}); } diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithMultipleCatalogs.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithMultipleCatalogs.java index c2cf8f675007..cb99ad55dab4 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithMultipleCatalogs.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergStorageHandlerWithMultipleCatalogs.java @@ -37,10 +37,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) public class TestHiveIcebergStorageHandlerWithMultipleCatalogs { diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergWithHiveAutogatherEnable.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergWithHiveAutogatherEnable.java index 6b3bddd637c2..ef1bcabbb0cc 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergWithHiveAutogatherEnable.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestHiveIcebergWithHiveAutogatherEnable.java @@ -38,10 +38,12 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.io.TempDir; +@Disabled // Fix of HIVE-21584 is not released on Hive 3.1 @ExtendWith(ParameterizedTestExtension.class) public class TestHiveIcebergWithHiveAutogatherEnable { diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/TestTables.java b/mr/src/test/java/org/apache/iceberg/mr/hive/TestTables.java index f2710290d5c2..81f44b22a09e 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/TestTables.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/TestTables.java @@ -51,7 +51,6 @@ import org.apache.iceberg.hadoop.HadoopCatalog; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.hive.HiveCatalog; -import org.apache.iceberg.hive.HiveVersion; import org.apache.iceberg.mr.Catalogs; import org.apache.iceberg.mr.InputFormatConfig; import org.apache.iceberg.mr.TestCatalogs; @@ -418,12 +417,7 @@ static class CustomCatalogTestTables extends TestTables { private final String warehouseLocation; CustomCatalogTestTables(Configuration conf, Path temp, String catalogName) throws IOException { - this( - conf, - temp, - (HiveVersion.min(HiveVersion.HIVE_3) ? 
"file:" : "") - + temp.resolve(Paths.get("custom", "warehouse")), - catalogName); + this(conf, temp, "file:" + temp.resolve(Paths.get("custom", "warehouse")), catalogName); } CustomCatalogTestTables( @@ -452,12 +446,7 @@ static class HadoopCatalogTestTables extends TestTables { private final String warehouseLocation; HadoopCatalogTestTables(Configuration conf, Path temp, String catalogName) throws IOException { - this( - conf, - temp, - (HiveVersion.min(HiveVersion.HIVE_3) ? "file:" : "") - + temp.resolve(Paths.get("hadoop", "warehouse")), - catalogName); + this(conf, temp, "file:" + temp.resolve(Paths.get("hadoop", "warehouse")), catalogName); } HadoopCatalogTestTables( diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergDateObjectInspector.java b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergDateObjectInspector.java deleted file mode 100644 index 6e03fae861f9..000000000000 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergDateObjectInspector.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.iceberg.mr.hive.serde.objectinspector; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.sql.Date; -import java.time.LocalDate; -import org.apache.hadoop.hive.serde2.io.DateWritable; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector; -import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.junit.jupiter.api.Test; - -public class TestIcebergDateObjectInspector { - - @Test - public void testIcebergDateObjectInspector() { - DateObjectInspector oi = IcebergDateObjectInspector.get(); - - assertThat(oi.getCategory()).isEqualTo(ObjectInspector.Category.PRIMITIVE); - assertThat(oi.getPrimitiveCategory()) - .isEqualTo(PrimitiveObjectInspector.PrimitiveCategory.DATE); - - assertThat(oi.getTypeInfo()).isEqualTo(TypeInfoFactory.dateTypeInfo); - assertThat(oi.getTypeName()).isEqualTo(TypeInfoFactory.dateTypeInfo.getTypeName()); - - assertThat(oi.getJavaPrimitiveClass()).isEqualTo(Date.class); - assertThat(oi.getPrimitiveWritableClass()).isEqualTo(DateWritable.class); - - assertThat(oi.copyObject(null)).isNull(); - assertThat(oi.getPrimitiveJavaObject(null)).isNull(); - assertThat(oi.getPrimitiveWritableObject(null)).isNull(); - - LocalDate local = LocalDate.of(2020, 1, 1); - Date date = Date.valueOf("2020-01-01"); - - assertThat(oi.getPrimitiveJavaObject(local)).isEqualTo(date); - assertThat(oi.getPrimitiveWritableObject(local)).isEqualTo(new DateWritable(date)); - - Date copy = (Date) oi.copyObject(date); - - assertThat(copy).isEqualTo(date); - assertThat(copy).isNotSameAs(date); - - assertThat(oi.preferWritable()).isFalse(); - } -} diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java index c2646376890c..5b797e9f82fe 100644 --- a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java +++ b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.iceberg.Schema; -import org.apache.iceberg.hive.HiveVersion; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.types.Types; import org.junit.jupiter.api.Test; @@ -99,14 +98,8 @@ public void testIcebergObjectInspector() { assertThat(dateField.getFieldID()).isEqualTo(3); assertThat(dateField.getFieldName()).isEqualTo("date_field"); assertThat(dateField.getFieldComment()).isEqualTo("date comment"); - if (HiveVersion.min(HiveVersion.HIVE_3)) { - assertThat(dateField.getFieldObjectInspector().getClass().getName()) - .isEqualTo( - "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspectorHive3"); - } else { - assertThat(dateField.getFieldObjectInspector().getClass().getName()) - .isEqualTo("org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspector"); - } + assertThat(dateField.getFieldObjectInspector().getClass().getSimpleName()) + .isEqualTo("IcebergDateObjectInspectorHive3"); // decimal StructField decimalField = soi.getStructFieldRef("decimal_field"); @@ -168,26 +161,16 @@ public void testIcebergObjectInspector() { 
diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java
index c2646376890c..5b797e9f82fe 100644
--- a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java
+++ b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergObjectInspector.java
@@ -29,7 +29,6 @@
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.iceberg.Schema;
-import org.apache.iceberg.hive.HiveVersion;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
 import org.apache.iceberg.types.Types;
 import org.junit.jupiter.api.Test;
@@ -99,14 +98,8 @@ public void testIcebergObjectInspector() {
     assertThat(dateField.getFieldID()).isEqualTo(3);
     assertThat(dateField.getFieldName()).isEqualTo("date_field");
     assertThat(dateField.getFieldComment()).isEqualTo("date comment");
-    if (HiveVersion.min(HiveVersion.HIVE_3)) {
-      assertThat(dateField.getFieldObjectInspector().getClass().getName())
-          .isEqualTo(
-              "org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspectorHive3");
-    } else {
-      assertThat(dateField.getFieldObjectInspector().getClass().getName())
-          .isEqualTo("org.apache.iceberg.mr.hive.serde.objectinspector.IcebergDateObjectInspector");
-    }
+    assertThat(dateField.getFieldObjectInspector().getClass().getSimpleName())
+        .isEqualTo("IcebergDateObjectInspectorHive3");
 
     // decimal
     StructField decimalField = soi.getStructFieldRef("decimal_field");
@@ -168,26 +161,16 @@ public void testIcebergObjectInspector() {
     assertThat(timestampField.getFieldID()).isEqualTo(11);
     assertThat(timestampField.getFieldName()).isEqualTo("timestamp_field");
     assertThat(timestampField.getFieldComment()).isEqualTo("timestamp comment");
-    if (HiveVersion.min(HiveVersion.HIVE_3)) {
-      assertThat(timestampField.getFieldObjectInspector().getClass().getSimpleName())
-          .isEqualTo("IcebergTimestampObjectInspectorHive3");
-    } else {
-      assertThat(timestampField.getFieldObjectInspector())
-          .isEqualTo(IcebergTimestampObjectInspector.get());
-    }
+    assertThat(timestampField.getFieldObjectInspector().getClass().getSimpleName())
+        .isEqualTo("IcebergTimestampObjectInspectorHive3");
 
     // timestamp with tz
     StructField timestampTzField = soi.getStructFieldRef("timestamptz_field");
     assertThat(timestampTzField.getFieldID()).isEqualTo(12);
     assertThat(timestampTzField.getFieldName()).isEqualTo("timestamptz_field");
     assertThat(timestampTzField.getFieldComment()).isEqualTo("timestamptz comment");
-    if (HiveVersion.min(HiveVersion.HIVE_3)) {
-      assertThat(timestampTzField.getFieldObjectInspector().getClass().getSimpleName())
-          .isEqualTo("IcebergTimestampWithZoneObjectInspectorHive3");
-    } else {
-      assertThat(timestampTzField.getFieldObjectInspector())
-          .isEqualTo(IcebergTimestampWithZoneObjectInspector.get());
-    }
+    assertThat(timestampTzField.getFieldObjectInspector().getClass().getSimpleName())
+        .isEqualTo("IcebergTimestampWithZoneObjectInspectorHive3");
 
     // UUID
     StructField uuidField = soi.getStructFieldRef("uuid_field");
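With the HiveVersion branches gone, these assertions compare only the simple class name of the Hive 3 inspectors. A standalone illustration of that assertion style (AssertJ assumed on the classpath; the ArrayList is just a stand-in object):

    import static org.assertj.core.api.Assertions.assertThat;

    public class SimpleNameAssertionSketch {
      public static void main(String[] args) {
        Object inspector = new java.util.ArrayList<String>();
        // getSimpleName() drops the package, which is enough once only one
        // implementation class per type remains.
        assertThat(inspector.getClass().getSimpleName()).isEqualTo("ArrayList");
      }
    }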
diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampObjectInspector.java b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampObjectInspector.java
deleted file mode 100644
index ea40cc20420a..000000000000
--- a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampObjectInspector.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iceberg.mr.hive.serde.objectinspector;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-import java.sql.Timestamp;
-import java.time.LocalDateTime;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.junit.jupiter.api.Test;
-
-public class TestIcebergTimestampObjectInspector {
-
-  @Test
-  public void testIcebergTimestampObjectInspector() {
-    IcebergTimestampObjectInspector oi = IcebergTimestampObjectInspector.get();
-
-    assertThat(oi.getCategory()).isEqualTo(ObjectInspector.Category.PRIMITIVE);
-    assertThat(oi.getPrimitiveCategory())
-        .isEqualTo(PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP);
-
-    assertThat(oi.getTypeInfo()).isEqualTo(TypeInfoFactory.timestampTypeInfo);
-    assertThat(oi.getTypeName()).isEqualTo(TypeInfoFactory.timestampTypeInfo.getTypeName());
-
-    assertThat(oi.getJavaPrimitiveClass()).isEqualTo(Timestamp.class);
-    assertThat(oi.getPrimitiveWritableClass()).isEqualTo(TimestampWritable.class);
-
-    assertThat(oi.copyObject(null)).isNull();
-    assertThat(oi.getPrimitiveJavaObject(null)).isNull();
-    assertThat(oi.getPrimitiveWritableObject(null)).isNull();
-    assertThat(oi.convert(null)).isNull();
-
-    LocalDateTime local = LocalDateTime.of(2020, 1, 1, 12, 55, 30, 5560000);
-    Timestamp ts = Timestamp.valueOf(local);
-
-    assertThat(oi.getPrimitiveJavaObject(local)).isEqualTo(ts);
-    assertThat(oi.getPrimitiveWritableObject(local)).isEqualTo(new TimestampWritable(ts));
-
-    Timestamp copy = (Timestamp) oi.copyObject(ts);
-
-    assertThat(copy).isEqualTo(ts);
-    assertThat(copy).isNotSameAs(ts);
-
-    assertThat(oi.preferWritable()).isFalse();
-
-    assertThat(oi.convert(ts)).isEqualTo(local);
-  }
-}
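As with the date test, this deletion drops coverage of the Hive 2 timestamp inspector, which round-tripped Iceberg's LocalDateTime through java.sql.Timestamp. The conversion at its heart, as a self-contained sketch:

    import java.sql.Timestamp;
    import java.time.LocalDateTime;

    public class TimestampConversionSketch {
      public static void main(String[] args) {
        LocalDateTime local = LocalDateTime.of(2020, 1, 1, 12, 55, 30, 5560000);
        Timestamp ts = Timestamp.valueOf(local);
        // Nanosecond precision survives the round trip, as the deleted test asserted.
        System.out.println(ts.toLocalDateTime().equals(local)); // true
      }
    }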
diff --git a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampWithZoneObjectInspector.java b/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampWithZoneObjectInspector.java
deleted file mode 100644
index 1b16e6e02c0e..000000000000
--- a/mr/src/test/java/org/apache/iceberg/mr/hive/serde/objectinspector/TestIcebergTimestampWithZoneObjectInspector.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.iceberg.mr.hive.serde.objectinspector;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-import java.sql.Timestamp;
-import java.time.LocalDateTime;
-import java.time.OffsetDateTime;
-import java.time.ZoneOffset;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-import org.junit.jupiter.api.Test;
-
-public class TestIcebergTimestampWithZoneObjectInspector {
-
-  @Test
-  public void testIcebergTimestampObjectInspectorWithUTCAdjustment() {
-    IcebergTimestampWithZoneObjectInspector oi = IcebergTimestampWithZoneObjectInspector.get();
-
-    assertThat(oi.getCategory()).isEqualTo(ObjectInspector.Category.PRIMITIVE);
-    assertThat(oi.getPrimitiveCategory())
-        .isEqualTo(PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP);
-
-    assertThat(oi.getTypeInfo()).isEqualTo(TypeInfoFactory.timestampTypeInfo);
-    assertThat(oi.getTypeName()).isEqualTo(TypeInfoFactory.timestampTypeInfo.getTypeName());
-
-    assertThat(oi.getJavaPrimitiveClass()).isEqualTo(Timestamp.class);
-    assertThat(oi.getPrimitiveWritableClass()).isEqualTo(TimestampWritable.class);
-
-    assertThat(oi.copyObject(null)).isNull();
-    assertThat(oi.getPrimitiveJavaObject(null)).isNull();
-    assertThat(oi.getPrimitiveWritableObject(null)).isNull();
-    assertThat(oi.convert(null)).isNull();
-
-    LocalDateTime local = LocalDateTime.of(2020, 1, 1, 16, 45, 33, 456000);
-    OffsetDateTime offsetDateTime = OffsetDateTime.of(local, ZoneOffset.ofHours(-5));
-    Timestamp ts = Timestamp.from(offsetDateTime.toInstant());
-
-    assertThat(oi.getPrimitiveJavaObject(offsetDateTime)).isEqualTo(ts);
-    assertThat(oi.getPrimitiveWritableObject(offsetDateTime)).isEqualTo(new TimestampWritable(ts));
-
-    Timestamp copy = (Timestamp) oi.copyObject(ts);
-
-    assertThat(copy).isEqualTo(ts);
-    assertThat(copy).isNotSameAs(ts);
-
-    assertThat(oi.preferWritable()).isFalse();
-
-    assertThat(oi.convert(ts))
-        .isEqualTo(
-            OffsetDateTime.ofInstant(local.toInstant(ZoneOffset.ofHours(-5)), ZoneOffset.UTC));
-
-    assertThat(offsetDateTime.withOffsetSameInstant(ZoneOffset.UTC))
-        .isEqualTo(oi.convert(Timestamp.from(offsetDateTime.toInstant())));
-  }
-}
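This last deletion removes the Hive 2 timestamp-with-zone test; its key behavior was that converting back from java.sql.Timestamp yields the same instant, normalized to UTC. A self-contained sketch of that round trip:

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.OffsetDateTime;
    import java.time.ZoneOffset;

    public class TimestampWithZoneSketch {
      public static void main(String[] args) {
        OffsetDateTime odt =
            OffsetDateTime.of(
                LocalDateTime.of(2020, 1, 1, 16, 45, 33, 456000), ZoneOffset.ofHours(-5));
        Timestamp ts = Timestamp.from(odt.toInstant());
        // java.sql.Timestamp carries no zone, so the recovered value is expressed at UTC.
        OffsetDateTime roundTrip = OffsetDateTime.ofInstant(ts.toInstant(), ZoneOffset.UTC);
        System.out.println(roundTrip.equals(odt.withOffsetSameInstant(ZoneOffset.UTC))); // true
      }
    }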