27 changes: 10 additions & 17 deletions api/src/main/java/org/apache/iceberg/transforms/Dates.java
@@ -18,9 +18,6 @@
*/
package org.apache.iceberg.transforms;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import org.apache.iceberg.expressions.BoundPredicate;
import org.apache.iceberg.expressions.BoundTransform;
@@ -30,6 +27,7 @@
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
import org.apache.iceberg.util.SerializableFunction;

enum Dates implements Transform<Integer, Integer> {
@@ -50,24 +48,19 @@ public Integer apply(Integer days) {
return null;
}

if (granularity == ChronoUnit.DAYS) {
return days;
}

if (days >= 0) {
LocalDate date = EPOCH.plusDays(days);
return (int) granularity.between(EPOCH, date);
} else {
// add 1 day to the value to account for the case where there is exactly 1 unit between the
// date and epoch because the result will always be decremented.
LocalDate date = EPOCH.plusDays(days + 1);
return (int) granularity.between(EPOCH, date) - 1;
switch (granularity) {
case YEARS:
return DateTimeUtil.daysToYears(days);
case MONTHS:
return DateTimeUtil.daysToMonths(days);
case DAYS:
return days;
default:
throw new UnsupportedOperationException("Unsupported time unit: " + granularity);
Contributor Author: This block is now similar to toHumanString below.
}
}
}

private static final LocalDate EPOCH =
Instant.ofEpochSecond(0).atOffset(ZoneOffset.UTC).toLocalDate();
private final ChronoUnit granularity;
private final String name;
private final Apply apply;
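For reference, a minimal sketch of the ordinals the refactored transform relies on, using the DateTimeUtil helpers added in this PR (the standalone class and the printed values below are illustrative only; they follow from counting whole months or years from the 1970-01-01 epoch, with the decrement applied to dates before it):

import org.apache.iceberg.util.DateTimeUtil;

public class DateTransformSketch {
  public static void main(String[] args) {
    // 1970-01-01 is day 0, month 0, year 0.
    System.out.println(DateTimeUtil.daysToMonths(0));   // 0
    // 1970-02-01 (day 31) is one whole month after the epoch.
    System.out.println(DateTimeUtil.daysToMonths(31));  // 1
    // 1969-12-31 (day -1) falls in the month before the epoch, so the ordinal is -1;
    // this is the case the decrement for negative inputs handles.
    System.out.println(DateTimeUtil.daysToMonths(-1));  // -1
    // 1969-01-01 (day -365) is exactly one year before the epoch.
    System.out.println(DateTimeUtil.daysToYears(-365)); // -1
  }
}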
32 changes: 12 additions & 20 deletions api/src/main/java/org/apache/iceberg/transforms/Timestamps.java
@@ -18,9 +18,6 @@
*/
package org.apache.iceberg.transforms;

import java.time.Instant;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoUnit;
import org.apache.iceberg.expressions.BoundPredicate;
import org.apache.iceberg.expressions.BoundTransform;
@@ -30,6 +27,7 @@
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.DateTimeUtil;
import org.apache.iceberg.util.SerializableFunction;

enum Timestamps implements Transform<Long, Integer> {
@@ -51,27 +49,21 @@ public Integer apply(Long timestampMicros) {
return null;
}

if (timestampMicros >= 0) {
OffsetDateTime timestamp =
Instant.ofEpochSecond(
Math.floorDiv(timestampMicros, 1_000_000),
Math.floorMod(timestampMicros, 1_000_000) * 1000)
.atOffset(ZoneOffset.UTC);
return (int) granularity.between(EPOCH, timestamp);
} else {
// add 1 micro to the value to account for the case where there is exactly 1 unit between
// the timestamp and epoch because the result will always be decremented.
OffsetDateTime timestamp =
Instant.ofEpochSecond(
Math.floorDiv(timestampMicros, 1_000_000),
Math.floorMod(timestampMicros + 1, 1_000_000) * 1000)
.atOffset(ZoneOffset.UTC);
return (int) granularity.between(EPOCH, timestamp) - 1;
switch (granularity) {
case YEARS:
return DateTimeUtil.microsToYears(timestampMicros);
case MONTHS:
return DateTimeUtil.microsToMonths(timestampMicros);
case DAYS:
return DateTimeUtil.microsToDays(timestampMicros);
case HOURS:
return DateTimeUtil.microsToHours(timestampMicros);
default:
throw new UnsupportedOperationException("Unsupported time unit: " + granularity);
}
}
}

private static final OffsetDateTime EPOCH = Instant.ofEpochSecond(0).atOffset(ZoneOffset.UTC);
private final ChronoUnit granularity;
private final String name;
private final SerializableFunction<Long, Integer> apply;
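Likewise, a small sketch of the microsecond-based conversions this file now delegates to DateTimeUtil; the expected ordinals are illustrative and follow the same rule of counting whole units from the epoch, decrementing for timestamps before it:

import org.apache.iceberg.util.DateTimeUtil;

public class TimestampTransformSketch {
  public static void main(String[] args) {
    long microsPerHour = 3_600L * 1_000_000L;
    // The first microsecond of 1970-01-01 belongs to hour 0 and day 0.
    System.out.println(DateTimeUtil.microsToHours(0L));                // 0
    // One full day after the epoch starts day 1.
    System.out.println(DateTimeUtil.microsToDays(24 * microsPerHour)); // 1
    // The last microsecond of 1969-12-31 belongs to day -1 and hour -1;
    // the decrement in the negative branch produces these ordinals.
    System.out.println(DateTimeUtil.microsToDays(-1L));                // -1
    System.out.println(DateTimeUtil.microsToHours(-1L));               // -1
  }
}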
api/src/main/java/org/apache/iceberg/util/DateTimeUtil.java
@@ -34,6 +34,7 @@ private DateTimeUtil() {}
public static final OffsetDateTime EPOCH = Instant.ofEpochSecond(0).atOffset(ZoneOffset.UTC);
public static final LocalDate EPOCH_DAY = EPOCH.toLocalDate();
public static final long MICROS_PER_MILLIS = 1000L;
public static final long MICROS_PER_SECOND = 1_000_000L;

public static LocalDate dateFromDays(int daysFromEpoch) {
return ChronoUnit.DAYS.addTo(EPOCH_DAY, daysFromEpoch);
@@ -133,4 +134,58 @@ public static long isoTimestampToMicros(String timestampString) {
return microsFromTimestamp(
LocalDateTime.parse(timestampString, DateTimeFormatter.ISO_LOCAL_DATE_TIME));
}

public static int daysToYears(int days) {
return convertDays(days, ChronoUnit.YEARS);
}

public static int daysToMonths(int days) {
return convertDays(days, ChronoUnit.MONTHS);
}

private static int convertDays(int days, ChronoUnit granularity) {
if (days >= 0) {
LocalDate date = EPOCH_DAY.plusDays(days);
return (int) granularity.between(EPOCH_DAY, date);
} else {
// add 1 day to the value to account for the case where there is exactly 1 unit between the
// date and epoch because the result will always be decremented.
LocalDate date = EPOCH_DAY.plusDays(days + 1);
return (int) granularity.between(EPOCH_DAY, date) - 1;
}
}

public static int microsToYears(long micros) {
return convertMicros(micros, ChronoUnit.YEARS);
}

public static int microsToMonths(long micros) {
return convertMicros(micros, ChronoUnit.MONTHS);
}

public static int microsToDays(long micros) {
return convertMicros(micros, ChronoUnit.DAYS);
}

public static int microsToHours(long micros) {
return convertMicros(micros, ChronoUnit.HOURS);
}

private static int convertMicros(long micros, ChronoUnit granularity) {
Contributor Author: These conversions are covered by the TestDates and TestTimestamps suites.
if (micros >= 0) {
long epochSecond = Math.floorDiv(micros, MICROS_PER_SECOND);
Member: Nit: am I missing something, or can we just pull these out of the individual blocks?

Contributor Author: I think we can pull out only epochSecond, so I wasn't sure it was worth it. Let me do that.

Contributor Author: I moved that variable, but it became somewhat disconnected from the rest of the logic in each branch, so I kept it separate, as in the original snippet.

long nanoAdjustment = Math.floorMod(micros, MICROS_PER_SECOND) * 1000;
return (int) granularity.between(EPOCH, toOffsetDateTime(epochSecond, nanoAdjustment));
} else {
// add 1 micro to the value to account for the case where there is exactly 1 unit between
// the timestamp and epoch because the result will always be decremented.
long epochSecond = Math.floorDiv(micros, MICROS_PER_SECOND);
long nanoAdjustment = Math.floorMod(micros + 1, MICROS_PER_SECOND) * 1000;
return (int) granularity.between(EPOCH, toOffsetDateTime(epochSecond, nanoAdjustment)) - 1;
}
}

private static OffsetDateTime toOffsetDateTime(long epochSecond, long nanoAdjustment) {
return Instant.ofEpochSecond(epochSecond, nanoAdjustment).atOffset(ZoneOffset.UTC);
}
}
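Following the thread above, this is a sketch (not the merged code) of what hoisting epochSecond out of the two branches could look like. Only nanoAdjustment depends on the +1 shift in the negative branch, so behavior should be unchanged; it reuses the EPOCH constant, MICROS_PER_SECOND, and the toOffsetDateTime helper already present in DateTimeUtil.

// Hypothetical variant discussed in the review thread; the merged code keeps
// epochSecond inside each branch so each branch reads as a self-contained unit.
private static int convertMicros(long micros, ChronoUnit granularity) {
  long epochSecond = Math.floorDiv(micros, MICROS_PER_SECOND);
  if (micros >= 0) {
    long nanoAdjustment = Math.floorMod(micros, MICROS_PER_SECOND) * 1000;
    return (int) granularity.between(EPOCH, toOffsetDateTime(epochSecond, nanoAdjustment));
  } else {
    // shift by 1 micro so a timestamp exactly 1 unit before the epoch is not double-counted,
    // because the result is always decremented for negative inputs
    long nanoAdjustment = Math.floorMod(micros + 1, MICROS_PER_SECOND) * 1000;
    return (int) granularity.between(EPOCH, toOffsetDateTime(epochSecond, nanoAdjustment)) - 1;
  }
}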