@@ -195,10 +195,11 @@ public CloseableIterator<Integer> iterator() {
}
});

AtomicInteger consumedCounter = new AtomicInteger(0);
try (CloseableIterable<Integer> concat = CloseableIterable.concat(transform)) {
concat.forEach(c -> c++);
concat.forEach(count -> consumedCounter.getAndIncrement());
}
Assertions.assertThat(counter.get()).isEqualTo(items.size());
Assertions.assertThat(counter.get()).isEqualTo(items.size()).isEqualTo(consumedCounter.get());
}

@Test
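In the hunk above, the old consumer `c -> c++` only incremented its own lambda parameter, which has no observable effect, so the test never really verified that the concatenated iterable was consumed. The new version captures an AtomicInteger and bumps it once per element, letting the final assertion compare the producer-side counter with the number of consumed items. A minimal, standalone sketch of the difference (the class name and the sample list are illustrative, not part of the PR):

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class ConsumedCounterSketch {
  public static void main(String[] args) {
    List<Integer> items = Arrays.asList(1, 2, 3);

    // Old style: 'c' is the lambda's own parameter, so c++ increments a local
    // copy and records nothing about how many elements were visited.
    items.forEach(c -> c++);

    // New style: the lambda captures an AtomicInteger and bumps it once per
    // element, so the caller can assert on the number of consumed items.
    AtomicInteger consumedCounter = new AtomicInteger(0);
    items.forEach(count -> consumedCounter.getAndIncrement());

    System.out.println(consumedCounter.get()); // prints 3
  }
}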
9 changes: 9 additions & 0 deletions baseline.gradle
@@ -42,6 +42,15 @@ subprojects {
apply plugin: 'com.palantir.baseline-release-compatibility'
apply plugin: 'com.diffplug.spotless'

pluginManager.withPlugin('com.palantir.baseline-checkstyle') {
checkstyle {
// com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
// in an old version of Checkstyle (9.1), which has an OutOfMemory bug: https://github.com/checkstyle/checkstyle/issues/10934.
// So, override the Checkstyle version via the CheckstyleExtension to 9.3 (the latest version supporting Java 8), which contains the fix.
toolVersion '9.3'
}
}

pluginManager.withPlugin('com.diffplug.spotless') {
spotless {
java {
4 changes: 0 additions & 4 deletions build.gradle
@@ -32,10 +32,6 @@ buildscript {
// in an old version of errorprone, which doesn't work w/ Gradle 8, so bump errorprone as
// well.
classpath "net.ltgt.gradle:gradle-errorprone-plugin:3.0.1"
// com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
// in an old version of the checkstyle(9.1), which has this OutOfMemory bug https://github.com/checkstyle/checkstyle/issues/10934
// So, bump checkstyle to the latest java 8 supported version(9.3) which contains the fix.
classpath "com.puppycrawl.tools:checkstyle:9.3"

classpath 'com.diffplug.spotless:spotless-plugin-gradle:6.13.0'
classpath 'gradle.plugin.org.inferred:gradle-processors:3.7.0'
2 changes: 1 addition & 1 deletion core/src/test/java/org/apache/iceberg/TestMetrics.java
@@ -454,7 +454,7 @@ public void testMetricsForTopLevelWithMultipleRowGroup() throws Exception {

for (int i = 0; i < recordCount; i++) {
Record newRecord = GenericRecord.create(SIMPLE_SCHEMA);
newRecord.setField("booleanCol", i == 0 ? false : true);
newRecord.setField("booleanCol", i != 0);
newRecord.setField("intCol", i + 1);
newRecord.setField("longCol", i == 0 ? null : i + 1L);
newRecord.setField("floatCol", i + 1.0F);
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
schemaConvertedFromIceberg.getFields().stream()
.map(
field -> {
org.apache.avro.Schema.Field updatedField = field;
if (field.name().equals("time_field")) {
// Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
// field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
LogicalTypes.timeMillis()
.addToSchema(
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
}

return new org.apache.avro.Schema.Field(field, field.schema());
return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
})
.collect(Collectors.toList());
return org.apache.avro.Schema.createRecord(
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
schemaConvertedFromIceberg.getFields().stream()
.map(
field -> {
org.apache.avro.Schema.Field updatedField = field;
if (field.name().equals("time_field")) {
// Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
// field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
LogicalTypes.timeMillis()
.addToSchema(
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
}

return new org.apache.avro.Schema.Field(field, field.schema());
return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
})
.collect(Collectors.toList());
return org.apache.avro.Schema.createRecord(
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
schemaConvertedFromIceberg.getFields().stream()
.map(
field -> {
org.apache.avro.Schema.Field updatedField = field;
if (field.name().equals("time_field")) {
// Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
// field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
LogicalTypes.timeMillis()
.addToSchema(
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
}

return new org.apache.avro.Schema.Field(field, field.schema());
return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
})
.collect(Collectors.toList());
return org.apache.avro.Schema.createRecord(
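The three identical hunks above (apparently one copy of the same test helper per supported Flink version) do two things: they stop reassigning the lambda parameter `field` by introducing a local `updatedField`, and they keep rebuilding `time_field` with the time-millis logical type backed by INT, since (per the in-code comment) Iceberg's AvroSchemaUtil produces timestamp-micros over Long while Flink's AvroToRowDataConverters expects millis. A self-contained sketch of the same fixup; the class name, the helper name fixupTimeField, and the toy SchemaBuilder schema are illustrative assumptions, not part of the PR:

import java.util.List;
import java.util.stream.Collectors;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class TimeFieldFixupSketch {

  // Rebuild the record schema so that "time_field" carries the time-millis
  // logical type backed by INT, leaving every other field untouched.
  static Schema fixupTimeField(Schema record) {
    List<Schema.Field> fields =
        record.getFields().stream()
            .map(
                field -> {
                  Schema.Field updatedField = field;
                  if (field.name().equals("time_field")) {
                    Schema timeMillis =
                        LogicalTypes.timeMillis()
                            .addToSchema(Schema.create(Schema.Type.INT));
                    updatedField = new Schema.Field("time_field", timeMillis);
                  }
                  // Avro Field objects remember their position once attached to a
                  // record, so copy the (possibly replaced) field into a fresh one.
                  return new Schema.Field(updatedField, updatedField.schema());
                })
            .collect(Collectors.toList());
    return Schema.createRecord(
        record.getName(), record.getDoc(), record.getNamespace(), record.isError(), fields);
  }

  public static void main(String[] args) {
    Schema original =
        SchemaBuilder.record("example")
            .fields()
            .requiredLong("time_field")
            .requiredInt("id")
            .endRecord();
    System.out.println(fixupTimeField(original));
  }
}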
@@ -248,7 +248,7 @@ public void createInputFile() throws IOException {
structNotNull.put("_int_field", INT_MIN_VALUE + i);
builder.set("_struct_not_null", structNotNull); // struct with int
builder.set("_no_stats", TOO_LONG_FOR_STATS); // value longer than 4k will produce no stats
builder.set("_boolean", (i % 2 == 0) ? true : false);
builder.set("_boolean", i % 2 == 0);
builder.set("_time", instant.plusSeconds(i * 86400).toEpochMilli());
builder.set("_date", instant.plusSeconds(i * 86400).getEpochSecond());
builder.set("_timestamp", instant.plusSeconds(i * 86400).toEpochMilli());
@@ -140,7 +140,7 @@ public void writeTestDataFile() throws IOException {
"id_string",
BINARY_PREFIX + (INT_MIN_VALUE + i),
"id_boolean",
(i % 2 == 0) ? true : false,
i % 2 == 0,
"id_date",
LocalDate.parse("2021-09-05"),
"id_int_decimal",
@@ -140,7 +140,7 @@ public void writeTestDataFile() throws IOException {
"id_string",
BINARY_PREFIX + (INT_MIN_VALUE + i),
"id_boolean",
(i % 2 == 0) ? true : false,
i % 2 == 0,
"id_date",
LocalDate.parse("2021-09-05"),
"id_int_decimal",