@@ -36,8 +36,8 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import org.apache.commons.codec.binary.Hex;
 import org.apache.commons.io.FileUtils;
+import org.apache.directory.api.util.Hex;
 import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 import org.apache.iceberg.relocated.com.google.common.io.ByteStreams;
@@ -134,9 +134,7 @@ public boolean shouldSkipCombine(Path path, Configuration conf) {
     return true;
   }

-  // Override annotation commented out, since this interface method has been introduced only in Hive
-  // 3
-  // @Override
+  @Override
   public VectorizedSupport.Support[] getSupportedFeatures() {
     return new VectorizedSupport.Support[0];
   }
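The restored annotation compiles only because the build now targets Hive 3, where the implemented interfaces actually declare these methods. A minimal sketch of the compile-time rule, using hypothetical stand-in interfaces rather than the real Hive API:

// Stand-ins for illustration only; the real interfaces live in Hive.
interface StorageHandlerV2 {
  boolean shouldSkipCombine();
}

interface StorageHandlerV3 extends StorageHandlerV2 {
  // Method that exists only in the newer interface version.
  String[] getSupportedFeatures();
}

class Handler implements StorageHandlerV3 {
  @Override
  public boolean shouldSkipCombine() {
    return true;
  }

  // Against a V2-only classpath this @Override would be a compile error,
  // which is why it had been commented out until the Hive 3 upgrade.
  @Override
  public String[] getSupportedFeatures() {
    return new String[0];
  }
}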
@@ -106,9 +106,7 @@ public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String
   @Override
   public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> map) {}

-  // Override annotation commented out, since this interface method has been introduced only in Hive
-  // 3
-  // @Override
+  @Override
   public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> secrets) {}

   @Override
@@ -18,9 +18,9 @@
  */
 package org.apache.iceberg.mr.hive.serde.objectinspector;

-import java.sql.Date;
 import java.time.LocalDate;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -41,12 +41,12 @@ private IcebergDateObjectInspector() {

   @Override
   public Date getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Date.valueOf((LocalDate) o);
+    return o == null ? null : Date.ofEpochDay((int) ((LocalDate) o).toEpochDay());
   }

   @Override
-  public DateWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : new DateWritable(DateTimeUtil.daysFromDate((LocalDate) o));
+  public DateWritableV2 getPrimitiveWritableObject(Object o) {
+    return o == null ? null : new DateWritableV2(DateTimeUtil.daysFromDate((LocalDate) o));
   }

   @Override
@@ -56,7 +56,7 @@ public Object copyObject(Object o) {
     }

     if (o instanceof Date) {
-      return new Date(((Date) o).getTime());
+      return Date.ofEpochDay(((Date) o).toEpochDay());
     } else if (o instanceof LocalDate) {
       return LocalDate.of(
           ((LocalDate) o).getYear(), ((LocalDate) o).getMonth(), ((LocalDate) o).getDayOfMonth());
@@ -67,6 +67,6 @@ public Object copyObject(Object o) {

   @Override
   public LocalDate convert(Object o) {
-    return o == null ? null : ((Date) o).toLocalDate();
+    return o == null ? null : LocalDate.ofEpochDay(((Date) o).toEpochDay());
   }
 }
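For background, Hive 3's Date type is a plain epoch-day count with no attached time zone, which is why the new code goes through toEpochDay() instead of java.sql conversions that consult the JVM default zone. A minimal, self-contained sketch of the round trip (the values are illustrative; DateWritableV2 is used as in the diff above):

import java.time.LocalDate;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateConversionSketch {
  public static void main(String[] args) {
    LocalDate local = LocalDate.of(2022, 3, 14);

    // Hive 3 Date wraps a day count since 1970-01-01; no zone math is involved.
    Date hiveDate = Date.ofEpochDay((int) local.toEpochDay());
    System.out.println(hiveDate); // 2022-03-14

    // The writable carries the same day count.
    DateWritableV2 writable = new DateWritableV2((int) local.toEpochDay());
    System.out.println(writable.get()); // 2022-03-14

    // Round trip back to java.time without touching the JVM default zone.
    LocalDate back = LocalDate.ofEpochDay(hiveDate.toEpochDay());
    System.out.println(back.equals(local)); // true
  }
}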
@@ -18,9 +18,10 @@
  */
 package org.apache.iceberg.mr.hive.serde.objectinspector;

-import java.sql.Timestamp;
 import java.time.LocalDateTime;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import java.time.ZoneOffset;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -41,27 +42,27 @@ private IcebergTimestampObjectInspector() {

   @Override
   public LocalDateTime convert(Object o) {
-    return o == null ? null : ((Timestamp) o).toLocalDateTime();
+    return o == null ? null : ((Timestamp) o).toSqlTimestamp().toLocalDateTime();
   }

   @Override
   public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Timestamp.valueOf((LocalDateTime) o);
+    return o == null
+        ? null
+        : Timestamp.ofEpochMilli(((LocalDateTime) o).toInstant(ZoneOffset.UTC).toEpochMilli());
   }

   @Override
-  public TimestampWritable getPrimitiveWritableObject(Object o) {
+  public TimestampWritableV2 getPrimitiveWritableObject(Object o) {
     Timestamp ts = getPrimitiveJavaObject(o);
-    return ts == null ? null : new TimestampWritable(ts);
+    return ts == null ? null : new TimestampWritableV2(ts);
   }

   @Override
   public Object copyObject(Object o) {
     if (o instanceof Timestamp) {
       Timestamp ts = (Timestamp) o;
-      Timestamp copy = new Timestamp(ts.getTime());
-      copy.setNanos(ts.getNanos());
-      return copy;
+      return Timestamp.ofEpochMilli(ts.toEpochMilli(), ts.getNanos());
     } else if (o instanceof LocalDateTime) {
       LocalDateTime ldt = (LocalDateTime) o;
       return LocalDateTime.of(ldt.toLocalDate(), ldt.toLocalTime());
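A note on the Timestamp changes: Hive 3's Timestamp is zone-agnostic and the diff pins LocalDateTime values to UTC, but toEpochMilli() only carries millisecond precision, which is why copyObject() uses the two-argument ofEpochMilli(millis, nanos) overload to preserve the nanosecond field. A minimal sketch under those assumptions:

import java.time.LocalDateTime;
import java.time.ZoneOffset;
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampConversionSketch {
  public static void main(String[] args) {
    // 123456789 nanos: more precision than a millisecond value can hold.
    LocalDateTime ldt = LocalDateTime.of(2022, 3, 14, 10, 30, 0, 123456789);

    // Pin to UTC, as in the diff, then build the zone-agnostic Hive Timestamp.
    Timestamp ts = Timestamp.ofEpochMilli(ldt.toInstant(ZoneOffset.UTC).toEpochMilli());
    System.out.println(ts.getNanos()); // 123000000 -- sub-millisecond digits are gone

    // Copying via the two-argument overload keeps whatever nanos survived.
    Timestamp copy = Timestamp.ofEpochMilli(ts.toEpochMilli(), ts.getNanos());
    System.out.println(copy.equals(ts)); // true
  }
}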
@@ -18,10 +18,10 @@
  */
 package org.apache.iceberg.mr.hive.serde.objectinspector;

-import java.sql.Timestamp;
 import java.time.OffsetDateTime;
 import java.time.ZoneOffset;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -42,25 +42,29 @@ private IcebergTimestampWithZoneObjectInspector() {

   @Override
   public OffsetDateTime convert(Object o) {
-    return o == null ? null : OffsetDateTime.ofInstant(((Timestamp) o).toInstant(), ZoneOffset.UTC);
+    return o == null
+        ? null
+        : OffsetDateTime.ofInstant(((Timestamp) o).toSqlTimestamp().toInstant(), ZoneOffset.UTC);
   }

   @Override
   public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : Timestamp.from(((OffsetDateTime) o).toInstant());
+    return o == null
+        ? null
+        : Timestamp.ofEpochMilli(((OffsetDateTime) o).toInstant().toEpochMilli());
   }

   @Override
-  public TimestampWritable getPrimitiveWritableObject(Object o) {
+  public TimestampWritableV2 getPrimitiveWritableObject(Object o) {
     Timestamp ts = getPrimitiveJavaObject(o);
-    return ts == null ? null : new TimestampWritable(ts);
+    return ts == null ? null : new TimestampWritableV2(ts);
   }

   @Override
   public Object copyObject(Object o) {
     if (o instanceof Timestamp) {
       Timestamp ts = (Timestamp) o;
-      Timestamp copy = new Timestamp(ts.getTime());
+      Timestamp copy = Timestamp.ofEpochMilli(ts.toEpochMilli());
       copy.setNanos(ts.getNanos());
       return copy;
     } else if (o instanceof OffsetDateTime) {
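The with-zone inspector follows the same pattern, with one extra wrinkle worth noting: converting through the zone-less Hive Timestamp normalizes every offset to UTC, so the original offset is not preserved. A small sketch under the same API assumptions:

import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampWithZoneSketch {
  public static void main(String[] args) {
    // An offset timestamp from any zone...
    OffsetDateTime odt = OffsetDateTime.parse("2022-03-14T10:30:00.123+05:00");

    // ...collapses to a zone-less Hive Timestamp holding the UTC instant.
    Timestamp ts = Timestamp.ofEpochMilli(odt.toInstant().toEpochMilli());

    // Reading it back always reports the UTC offset, not the original +05:00.
    OffsetDateTime back =
        OffsetDateTime.ofInstant(ts.toSqlTimestamp().toInstant(), ZoneOffset.UTC);
    System.out.println(back); // 2022-03-14T05:30:00.123Z
  }
}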
versions.props (2 additions, 2 deletions)
@@ -1,8 +1,8 @@
 org.slf4j:* = 1.7.36
 org.apache.avro:avro = 1.11.1
 org.apache.calcite:* = 1.10.0
-org.apache.hadoop:* = 2.7.3
-org.apache.hive:* = 2.3.8
+org.apache.hadoop:* = 3.3.4
+org.apache.hive:* = 3.1.3
 org.apache.httpcomponents.client5:* = 5.2.1
 org.apache.orc:* = 1.8.3
 org.apache.parquet:* = 1.12.3