2 changes: 1 addition & 1 deletion api/src/main/java/org/apache/iceberg/SortField.java
@@ -80,7 +80,7 @@ public NullOrder nullOrder() {
* @return true if this order satisfies the given order
*/
public boolean satisfies(SortField other) {
- if (this == other) {
+ if (Objects.equals(this, other)) {

Contributor Author: even though using reference equality is correct here, it takes the reader much longer to verify whether it was intentional than just using equals().

return true;
} else if (sourceId != other.sourceId || direction != other.direction || nullOrder != other.nullOrder) {
return false;
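A minimal, standalone sketch (not part of this change) of the distinction the comment above is about: == compares object identity, while Objects.equals compares values and also tolerates nulls. Note that Objects.equals(a, b) itself short-circuits on a == b, so replacing an identity check with it keeps the fast path while reading as a value comparison.

```java
import java.util.Objects;

public class EqualityDemo {
  public static void main(String[] args) {
    String a = new String("iceberg");
    String b = new String("iceberg");

    System.out.println(a == b);                     // false: two distinct objects
    System.out.println(a.equals(b));                // true: equal values
    System.out.println(Objects.equals(a, b));       // true: equal values, null-safe
    System.out.println(Objects.equals(null, null)); // true instead of a NullPointerException
  }
}
```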
3 changes: 2 additions & 1 deletion api/src/main/java/org/apache/iceberg/types/TypeUtil.java
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+ import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
@@ -45,7 +46,7 @@ public static Schema select(Schema schema, Set<Integer> fieldIds) {
Preconditions.checkNotNull(schema, "Schema cannot be null");

Types.StructType result = select(schema.asStruct(), fieldIds);
- if (schema.asStruct() == result) {
+ if (Objects.equals(schema.asStruct(), result)) {
return schema;
} else if (result != null) {
if (schema.getAliases() != null) {
@@ -21,6 +21,7 @@

import java.util.List;
import java.util.Map;
+ import java.util.Objects;
import java.util.function.Supplier;
import org.apache.avro.JsonProperties;
import org.apache.avro.Schema;
@@ -133,7 +134,7 @@ public Schema.Field field(Schema.Field field, Supplier<Schema> fieldResult) {
try {
Schema schema = fieldResult.get();

- if (schema != field.schema() || !expectedName.equals(field.name())) {
+ if (!Objects.equals(schema, field.schema()) || !expectedName.equals(field.name())) {
// add an alias for the field
return AvroSchemaUtil.copyField(field, schema, AvroSchemaUtil.makeCompatibleName(expectedName));
} else {
@@ -153,7 +154,7 @@ public Schema union(Schema union, Iterable<Schema> options) {
Schema nonNullOriginal = AvroSchemaUtil.fromOption(union);
Schema nonNullResult = AvroSchemaUtil.fromOptions(Lists.newArrayList(options));

- if (nonNullOriginal != nonNullResult) {
+ if (!Objects.equals(nonNullOriginal, nonNullResult)) {
return AvroSchemaUtil.toOption(nonNullResult);
}

@@ -174,7 +175,7 @@ public Schema array(Schema array, Supplier<Schema> element) {
Schema.Field valueProjection = element.get().getField("value");

// element was changed, create a new array
- if (valueProjection.schema() != valueField.schema()) {
+ if (!Objects.equals(valueProjection.schema(), valueField.schema())) {
return AvroSchemaUtil.createProjectionMap(keyValueSchema.getFullName(),
AvroSchemaUtil.getFieldId(keyField), keyField.name(), keyField.schema(),
AvroSchemaUtil.getFieldId(valueField), valueField.name(), valueProjection.schema());
@@ -199,7 +200,7 @@ public Schema array(Schema array, Supplier<Schema> element) {
Schema elementSchema = element.get();

// element was changed, create a new array
- if (elementSchema != array.getElementType()) {
+ if (!Objects.equals(elementSchema, array.getElementType())) {
return Schema.createArray(elementSchema);
}

@@ -223,7 +224,7 @@ public Schema map(Schema map, Supplier<Schema> value) {
Schema valueSchema = value.get();

// element was changed, create a new map
- if (valueSchema != map.getValueType()) {
+ if (!Objects.equals(valueSchema, map.getValueType())) {
return Schema.createMap(valueSchema);
}

14 changes: 8 additions & 6 deletions core/src/main/java/org/apache/iceberg/avro/PruneColumns.java
@@ -21,6 +21,7 @@

import java.util.List;
import java.util.Map;
+ import java.util.Objects;
import java.util.Set;
import org.apache.avro.JsonProperties;
import org.apache.avro.Schema;
@@ -118,7 +119,7 @@ public Schema union(Schema union, List<Schema> options) {
}

if (pruned != null) {
- if (pruned != AvroSchemaUtil.fromOption(union)) {
+ if (!Objects.equals(pruned, AvroSchemaUtil.fromOption(union))) {
return AvroSchemaUtil.toOption(pruned);
}
return union;
@@ -149,13 +150,14 @@ public Schema array(Schema array, Schema element) {
Schema valueProjection = element.getField("value").schema();
// it is possible that key is not selected, and
// key schemas can be different if new field ids were assigned to them
- if (keyProjectionField != null && keyValue.getField("key").schema() != keyProjectionField.schema()) {
+ if (keyProjectionField != null &&
+     !Objects.equals(keyValue.getField("key").schema(), keyProjectionField.schema())) {
Preconditions.checkState(
SchemaNormalization.parsingFingerprint64(keyValue.getField("key").schema()) ==
SchemaNormalization.parsingFingerprint64(keyProjectionField.schema()),
- "Map keys should not be projected");
+ "Map keys should not be projected");
return AvroSchemaUtil.createMap(keyId, keyProjectionField.schema(), valueId, valueProjection);
- } else if (keyValue.getField("value").schema() != valueProjection) {
+ } else if (!Objects.equals(keyValue.getField("value").schema(), valueProjection)) {
return AvroSchemaUtil.createMap(keyId, keyValue.getField("key").schema(), valueId, valueProjection);
} else {
return complexMapWithIds(array, keyId, valueId);
@@ -171,7 +173,7 @@ public Schema array(Schema array, Schema element) {
if (selectedIds.contains(elementId)) {
return arrayWithId(array, elementId);
} else if (element != null) {
- if (element != array.getElementType()) {
+ if (!Objects.equals(element, array.getElementType())) {
// the element must be a projection
return arrayWithId(Schema.createArray(element), elementId);
}
@@ -199,7 +201,7 @@ public Schema map(Schema map, Schema value) {
// e.g if we are reading data not written by Iceberg writers
return mapWithIds(map, keyId, valueId);
} else if (value != null) {
- if (value != map.getValueType()) {
+ if (!Objects.equals(value, map.getValueType())) {
// the value must be a projection
return mapWithIds(Schema.createMap(value), keyId, valueId);
}
3 changes: 2 additions & 1 deletion core/src/main/java/org/apache/iceberg/avro/RemoveIds.java
@@ -21,6 +21,7 @@

import java.util.List;
import java.util.Map;
+ import java.util.Objects;
import org.apache.avro.Schema;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;

@@ -74,7 +75,7 @@ private static Schema.Field copyField(Schema.Field field, Schema newSchema) {
Schema.Field copy = new Schema.Field(field.name(), newSchema, field.doc(), field.defaultVal(), field.order());
for (Map.Entry<String, Object> prop : field.getObjectProps().entrySet()) {
String key = prop.getKey();
- if (key != AvroSchemaUtil.FIELD_ID_PROP) {
+ if (!Objects.equals(key, AvroSchemaUtil.FIELD_ID_PROP)) {
copy.addProp(key, prop.getValue());
}
}
3 changes: 2 additions & 1 deletion core/src/main/java/org/apache/iceberg/avro/SchemaToType.java
@@ -20,6 +20,7 @@
package org.apache.iceberg.avro;

import java.util.List;
+ import java.util.Objects;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
@@ -83,7 +84,7 @@ public Type record(Schema record, List<String> names, List<Type> fieldTypes) {
List<Schema.Field> fields = record.getFields();
List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(fields.size());

- if (root == record) {
+ if (Objects.equals(root, record)) {
this.nextId = 0;
}

@@ -171,6 +171,7 @@ public Type visit(MapType mapType) {
}

@Override
+ @SuppressWarnings("ReferenceEquality")

Contributor Author: this is for the boolean isRoot = root == rowType check, which seems to be on purpose, but maybe you could double-check whether using reference equality here is still wanted? Same for SparkTypeToType.

public Type visit(RowType rowType) {
List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(rowType.getFieldCount());
boolean isRoot = root == rowType;
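For the isRoot question raised above, a hedged sketch with illustrative stand-in classes (not Iceberg's or Flink's actual types) of why an identity check can be intentional when detecting the traversal root: a nested node that happens to be structurally equal to the root must still be treated as non-root, which equals() alone cannot distinguish.

```java
import java.util.Collections;
import java.util.List;
import java.util.Objects;

// Illustrative stand-in for a row/struct type node; not Iceberg's RowType.
final class Row {
  final String name;
  final List<Row> children;

  Row(String name, List<Row> children) {
    this.name = name;
    this.children = children;
  }

  @Override
  public boolean equals(Object o) {
    return o instanceof Row && Objects.equals(name, ((Row) o).name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name);
  }
}

class RootAwareVisitor {
  private final Row root;

  RootAwareVisitor(Row root) {
    this.root = root;
  }

  void visit(Row row) {
    boolean isRoot = row == root;          // identity: true only for the exact root object
    boolean equalsRoot = row.equals(root); // value equality: also true for a look-alike child
    System.out.println(row.name + " isRoot=" + isRoot + " equalsRoot=" + equalsRoot);
    row.children.forEach(this::visit);
  }

  public static void main(String[] args) {
    Row lookAlike = new Row("r", Collections.emptyList());
    Row root = new Row("r", Collections.singletonList(lookAlike)); // child is equal to the root by value
    new RootAwareVisitor(root).visit(root);
    // prints:
    // r isRoot=true equalsRoot=true
    // r isRoot=false equalsRoot=true   <- equals() would misidentify this child as the root
  }
}
```

Only the first node visited is the same object as root, so == answers "is this the node I started from?", which is what the root check needs.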
@@ -131,7 +131,7 @@ public Type map(GroupType map, Type key, Type value) {
} else if (value != null) {
Integer mapId = getId(map);
if (!Objects.equal(value, originalValue)) {
- Type mapType = Types.map(map.getRepetition())
+ Type mapType = Types.map(map.getRepetition())
.key(originalKey)
.value(value)
.named(map.getName());
@@ -372,7 +372,7 @@ public static void importSparkTable(SparkSession spark, TableIdentifier sourceTa
try {
PartitionSpec spec = SparkSchemaUtil.specForTable(spark, sourceTableIdentWithDB.unquotedString());

- if (spec == PartitionSpec.unpartitioned()) {
+ if (Objects.equal(spec, PartitionSpec.unpartitioned())) {

Contributor Author: checking for reference equality is probably fine here, but it takes a reader longer to navigate the code and figure out whether reference equality is really wanted here versus just using equals().

importUnpartitionedSparkTable(spark, sourceTableIdentWithDB, targetTable);
} else {
List<SparkPartition> sourceTablePartitions = getPartitions(spark, sourceTableIdent);
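A hedged sketch of the pattern the comment above refers to, using an illustrative Spec class rather than Iceberg's PartitionSpec (the caching and field names here are assumptions): when a factory method hands back a cached canonical instance, == happens to work, but a value comparison asks the real question, "is this spec unpartitioned?", and keeps working even for a structurally identical spec built elsewhere. (The diff above uses Objects.equal, Guava's variant, which behaves like java.util.Objects.equals for this purpose.)

```java
import java.util.Objects;

// Illustrative stand-in; not PartitionSpec's actual implementation.
final class Spec {
  private static final Spec UNPARTITIONED = new Spec(0);

  private final int fieldCount;

  private Spec(int fieldCount) {
    this.fieldCount = fieldCount;
  }

  static Spec unpartitioned() {
    return UNPARTITIONED; // always the same cached instance
  }

  static Spec withFields(int fieldCount) {
    return new Spec(fieldCount);
  }

  @Override
  public boolean equals(Object o) {
    return o instanceof Spec && ((Spec) o).fieldCount == fieldCount;
  }

  @Override
  public int hashCode() {
    return Integer.hashCode(fieldCount);
  }
}

class SpecCheck {
  public static void main(String[] args) {
    Spec cached = Spec.unpartitioned();
    Spec rebuilt = Spec.withFields(0); // structurally unpartitioned, but a different object

    System.out.println(cached == Spec.unpartitioned());                // true, but only because of caching
    System.out.println(rebuilt == Spec.unpartitioned());               // false, even though it is unpartitioned
    System.out.println(Objects.equals(rebuilt, Spec.unpartitioned())); // true: asks the question directly
  }
}
```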
@@ -64,6 +64,7 @@ private int getNextId() {
}

@Override
+ @SuppressWarnings("ReferenceEquality")
public Type struct(StructType struct, List<Type> types) {
StructField[] fields = struct.fields();
List<Types.NestedField> newFields = Lists.newArrayListWithExpectedSize(fields.length);