diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/StopWordsRemoverSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/StopWordsRemoverSuite.scala
index c142f83e0595..dc6fb31a1f8e 100755
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/StopWordsRemoverSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/StopWordsRemoverSuite.scala
@@ -242,7 +242,7 @@ class StopWordsRemoverSuite extends MLTest with DefaultReadWriteTest {
     remover.transform(df)
       .select("filtered1", "expected1", "filtered2", "expected2")
       .collect().foreach {
-        case Row(r1: Seq[String], e1: Seq[String], r2: Seq[String], e2: Seq[String]) =>
+        case Row(r1: Seq[_], e1: Seq[_], r2: Seq[_], e2: Seq[_]) =>
           assert(r1 === e1,
             s"The result value is not correct after bucketing. Expected $e1 but found $r1")
           assert(r2 === e2,
@@ -268,7 +268,7 @@ class StopWordsRemoverSuite extends MLTest with DefaultReadWriteTest {
     remover.transform(df)
       .select("filtered1", "expected1", "filtered2", "expected2")
       .collect().foreach {
-        case Row(r1: Seq[String], e1: Seq[String], r2: Seq[String], e2: Seq[String]) =>
+        case Row(r1: Seq[_], e1: Seq[_], r2: Seq[_], e2: Seq[_]) =>
           assert(r1 === e1,
             s"The result value is not correct after bucketing. Expected $e1 but found $r1")
           assert(r2 === e2,
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/MLTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/MLTest.scala
index 3e1e2ad6a7f5..410fbf297846 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/MLTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/MLTest.scala
@@ -88,6 +88,8 @@ trait MLTest extends StreamTest with TempDirectory { self: Suite =>
     val n = Attribute.fromStructField(dataframe.schema(colName)) match {
       case binAttr: BinaryAttribute => Some(2)
       case nomAttr: NominalAttribute => nomAttr.getNumValues
+      case unknown =>
+        throw new IllegalArgumentException(s"Attribute type: ${unknown.getClass.getName}")
     }
     assert(n.isDefined && n.get === numValues,
       s"the number of values obtained from schema should be $numValues, but got $n")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
index 402c5e1472c3..1491f5904bae 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala
@@ -78,8 +78,12 @@ case object FloatType extends FloatType {
 }
 
 trait FloatAsIfIntegral extends FloatIsConflicted with Integral[Float] {
-  def quot(x: Float, y: Float): Float = (BigDecimal(x) quot BigDecimal(y)).floatValue
-  def rem(x: Float, y: Float): Float = (BigDecimal(x) remainder BigDecimal(y)).floatValue
+  def quot(x: Float, y: Float): Float = {
+    (BigDecimal(x.toDouble) quot BigDecimal(y.toDouble)).floatValue
+  }
+  def rem(x: Float, y: Float): Float = {
+    (BigDecimal(x.toDouble) remainder BigDecimal(y.toDouble)).floatValue
+  }
 }
 
 object FloatAsIfIntegral extends FloatAsIfIntegral {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisExternalCatalogSuite.scala
index 9b6e8841a980..a85ac3fc4d0b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisExternalCatalogSuite.scala
@@ -59,7 +59,7 @@ class AnalysisExternalCatalogSuite extends AnalysisTest with Matchers {
       Alias(UnresolvedFunction("sum", Seq(UnresolvedAttribute("a")), isDistinct = false), "s")()
     val plan = Project(Seq(func), testRelation)
     analyzer.execute(plan)
-    verifyZeroInteractions(catalog)
+    verifyNoInteractions(catalog)
   }
 }
 
@@ -73,7 +73,7 @@ class AnalysisExternalCatalogSuite extends AnalysisTest with Matchers {
       ignoreIfExists = false)
     reset(externCatalog)
     catalog.functionExists(FunctionIdentifier("sum"))
-    verifyZeroInteractions(externCatalog)
+    verifyNoInteractions(externCatalog)
   }
 }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/CSVExprUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/CSVExprUtilsSuite.scala
index 4696a578f2f9..06ffb4231062 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/CSVExprUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/CSVExprUtilsSuite.scala
@@ -78,7 +78,7 @@ class CSVExprUtilsSuite extends SparkFunSuite {
     // null character, expressed in Unicode literal syntax
     ("""\u0000""", Some("\u0000"), None),
     // and specified directly
-    ("\0", Some("\u0000"), None)
+    ("\u0000", Some("\u0000"), None)
   )
 
   test("should correctly produce separator strings, or exceptions, from input") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index b4343b648110..cc9ebfe40942 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.sql.{Date, Timestamp}
 import java.util.TimeZone
 
+import scala.language.implicitConversions
 import scala.util.Random
 
 import org.apache.spark.SparkFunSuite
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/HashExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/HashExpressionsSuite.scala
index 3a68847ecb1f..7dc072722352 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/HashExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/HashExpressionsSuite.scala
@@ -21,6 +21,7 @@ import java.nio.charset.StandardCharsets
 import java.time.{ZoneId, ZoneOffset}
 
 import scala.collection.mutable.ArrayBuffer
+import scala.language.implicitConversions
 
 import org.apache.commons.codec.digest.DigestUtils
 import org.scalatest.exceptions.TestFailedException
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 0b694ea95415..246709b70abb 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -20,6 +20,8 @@ import java.sql.{Date, Timestamp}
 import java.time.LocalDateTime
 import java.util.concurrent.TimeUnit
 
+import scala.language.implicitConversions
+
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, _}
 import org.apache.spark.sql.catalyst.expressions._
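Note (illustration, not part of the patch): the `Seq[String]` -> `Seq[_]` changes above work around type erasure, since a runtime pattern match can only check that a value is a Seq, not what its elements are, and newer Scala compilers warn on such patterns; the `BigDecimal(x)` -> `BigDecimal(x.toDouble)` changes avoid the deprecated `BigDecimal.apply(Float)` overload. A minimal standalone Scala sketch of both points follows; the object name `ErasureAndBigDecimalSketch` is ours, not from the patch.

object ErasureAndBigDecimalSketch {
  def main(args: Array[String]): Unit = {
    // Generic type arguments are erased at runtime: `case s: Seq[String]`
    // can only verify "is a Seq", not the element type, so the compiler
    // emits an "unchecked" warning. `Seq[_]` states exactly what is checked.
    val row: Any = Seq(1, 2, 3)
    row match {
      case s: Seq[_] => println(s"matched a Seq with ${s.length} elements")
      case other => println(s"not a Seq: $other")
    }

    // BigDecimal.apply(Float) is deprecated, hence the explicit widening
    // to Double in the FloatType.scala hunk above.
    val x = 7.5f
    val y = 2.0f
    val q = (BigDecimal(x.toDouble) quot BigDecimal(y.toDouble)).floatValue
    println(s"quot($x, $y) = $q") // 3.0: truncated quotient of 7.5 / 2.0
  }
}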