diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OrderlessLimitPushDownOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OrderlessLimitPushDownOptimizer.java index 0448d24eb19a..d24968891c13 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OrderlessLimitPushDownOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/OrderlessLimitPushDownOptimizer.java @@ -27,6 +27,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Stack; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; @@ -114,7 +115,7 @@ private void combineLimits(LimitOperator childLimit) throws SemanticException { LimitOperator parentLimit
= (LimitOperator) childLimit.getParentOperators().get(0); LimitDesc parentConf = parentLimit.getConf(); LimitDesc childConf = childLimit.getConf(); - if (parentConf.getOffset() == childConf.getOffset()) { + if (Objects.equals(parentConf.getOffset(), childConf.getOffset())) { int min = Math.min(parentConf.getLimit(), childConf.getLimit()); LOG.debug("Combining two limits child={}, parent={}, newLimit={}", childLimit, parentLimit, min); parentConf.setLimit(min); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java index f0fc1b4b1508..223ce6072a9b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java @@ -927,7 +927,7 @@ public boolean isDistanceGreaterPrimitive(Double d1, Double d2, int amt) { @Override public boolean isEqualPrimitive(Double d1, Double d2) { if (d1 != null && d2 != null) { - return d1 == d2; + return d1.equals(d2); } return d1 == null && d2 == null; // True if both are null diff --git a/ql/src/test/results/clientpositive/llap/windowing_udaf.q.out b/ql/src/test/results/clientpositive/llap/windowing_udaf.q.out index 1c216fdaf033..0d353786a2f0 100644 --- a/ql/src/test/results/clientpositive/llap/windowing_udaf.q.out +++ b/ql/src/test/results/clientpositive/llap/windowing_udaf.q.out @@ -505,7 +505,7 @@ alice brown 25.258749999999996 alice brown 25.529374999999998 alice brown 25.63012987012987 alice brown 26.472439024390237 -alice brown 27.27881720430106 +alice brown 27.100638297872322 alice brown 28.227894736842114 alice brown 44.05 alice carson 22.345500000000005