diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala
index bddb7688fe96..c160ff206a62 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveUserDefinedTypeSuite.scala
@@ -18,12 +18,13 @@
 package org.apache.spark.sql.hive
 
 import scala.collection.JavaConverters._
+import scala.util.Random
 
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF
 import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, StandardListObjectInspector}
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
 
-import org.apache.spark.sql.{QueryTest, RandomDataGenerator, Row}
+import org.apache.spark.sql.{QueryTest, Row}
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT}
@@ -35,9 +36,8 @@ class HiveUserDefinedTypeSuite extends QueryTest with TestHiveSingleton {
   test("Support UDT in Hive UDF") {
     val functionName = "get_point_x"
     try {
-      val schema = new StructType().add("point", new ExamplePointUDT)
-      val inputGenerator = RandomDataGenerator.forType(schema, nullable = false).get
-      val input = inputGenerator.apply().asInstanceOf[Row]
+      val schema = new StructType().add("point", new ExamplePointUDT, nullable = false)
+      val input = Row.fromSeq(Seq(new ExamplePoint(3.141592d, -3.141592d)))
       val df = spark.createDataFrame(Array(input).toList.asJava, schema)
       df.createOrReplaceTempView("src")
       spark.sql(s"CREATE FUNCTION $functionName AS '$functionClass'")
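The patch replaces the RandomDataGenerator-produced input with a fixed `ExamplePoint`, so the UDT-typed test row is deterministic and the Hive test no longer needs that generator import. The following is a minimal standalone sketch (not part of the patch) that restates the new setup outside the test suite; the object name, `SparkSession` bootstrapping, and the final `SELECT` are illustrative assumptions, and it presumes the spark-sql test classes (`ExamplePoint`, `ExamplePointUDT`) are on the classpath.

```scala
import scala.collection.JavaConverters._

import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT}
import org.apache.spark.sql.types.StructType

// Hypothetical harness, for illustration only.
object UdtInputSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("udt-input-sketch").getOrCreate()

    // Non-nullable UDT column, mirroring the patched schema.
    val schema = new StructType().add("point", new ExamplePointUDT, nullable = false)
    // Fixed input row instead of a randomly generated one, so repeated runs see the same data.
    val input = Row.fromSeq(Seq(new ExamplePoint(3.141592d, -3.141592d)))

    val df = spark.createDataFrame(Seq(input).asJava, schema)
    df.createOrReplaceTempView("src")

    // Illustrative query over the UDT column (the patched test instead registers a Hive UDF).
    spark.sql("SELECT point FROM src").show(truncate = false)

    spark.stop()
  }
}
```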