diff --git a/core/src/main/java/org/apache/iceberg/BaseOverwriteFiles.java b/core/src/main/java/org/apache/iceberg/BaseOverwriteFiles.java
index 22a218d83bb5..e1fbb0f942c5 100644
--- a/core/src/main/java/org/apache/iceberg/BaseOverwriteFiles.java
+++ b/core/src/main/java/org/apache/iceberg/BaseOverwriteFiles.java
@@ -92,6 +92,7 @@ public OverwriteFiles caseSensitive(boolean isCaseSensitive) {
     return this;
   }
 
+  @Override
   public OverwriteFiles validateNoConflictingAppends(Expression newConflictDetectionFilter) {
     Preconditions.checkArgument(newConflictDetectionFilter != null, "Conflict detection filter cannot be null");
     this.conflictDetectionFilter = newConflictDetectionFilter;
diff --git a/core/src/main/java/org/apache/iceberg/types/FixupTypes.java b/core/src/main/java/org/apache/iceberg/types/FixupTypes.java
index b203ca131807..d1833558a2f6 100644
--- a/core/src/main/java/org/apache/iceberg/types/FixupTypes.java
+++ b/core/src/main/java/org/apache/iceberg/types/FixupTypes.java
@@ -33,7 +33,7 @@ public abstract class FixupTypes extends TypeUtil.CustomOrderSchemaVisitor
   private final Schema referenceSchema;
   private Type sourceType;
 
-  public FixupTypes(Schema referenceSchema) {
+  protected FixupTypes(Schema referenceSchema) {
     this.referenceSchema = referenceSchema;
     this.sourceType = referenceSchema.asStruct();
   }
diff --git a/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java b/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
index 2909a91e2c45..1ddfa270320c 100644
--- a/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
+++ b/data/src/main/java/org/apache/iceberg/data/DeleteFilter.java
@@ -65,7 +65,7 @@ public abstract class DeleteFilter {
   private final Schema requiredSchema;
   private final Accessor posAccessor;
 
-  public DeleteFilter(FileScanTask task, Schema tableSchema, Schema requestedSchema) {
+  protected DeleteFilter(FileScanTask task, Schema tableSchema, Schema requestedSchema) {
     this.setFilterThreshold = DEFAULT_SET_FILTER_THRESHOLD;
     this.dataFile = task.file();
diff --git a/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueReaders.java b/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueReaders.java
index 70e8b08fc4a0..f35ab7a17c63 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueReaders.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueReaders.java
@@ -181,7 +181,7 @@ private TimestampTzReader() {
     @Override
     public Long nonNullRead(ColumnVector vector, int row) {
       TimestampColumnVector tcv = (TimestampColumnVector) vector;
-      return (Math.floorDiv(tcv.time[row], 1_000)) * 1_000_000 + Math.floorDiv(tcv.nanos[row], 1000);
+      return Math.floorDiv(tcv.time[row], 1_000) * 1_000_000 + Math.floorDiv(tcv.nanos[row], 1000);
     }
   }
 
diff --git a/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueWriters.java b/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueWriters.java
index 9148b5a8a89f..73f8969e492b 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueWriters.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/data/SparkOrcValueWriters.java
@@ -185,7 +185,7 @@ public void nonNullWrite(int rowId, int column, SpecializedGetters data, ColumnV
       TimestampColumnVector cv = (TimestampColumnVector) output;
       long micros = data.getLong(column); // it could be negative.
       cv.time[rowId] = Math.floorDiv(micros, 1_000); // millis
-      cv.nanos[rowId] = (int) (Math.floorMod(micros, 1_000_000)) * 1_000; // nanos
+      cv.nanos[rowId] = (int) Math.floorMod(micros, 1_000_000) * 1_000; // nanos
     }
   }
 