diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 4e2625086837..47fd7fc1178a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -604,7 +604,7 @@ class SQLContext private[sql](
     val className = beanClass.getName
     val beanInfo = Introspector.getBeanInfo(beanClass)
     val rows = SQLContext.beansToRows(data.asScala.iterator, beanInfo, attrSeq)
-    DataFrame(self, LocalRelation(attrSeq, rows.toSeq))
+    DataFrame(self, LocalRelation(attrSeq, rows.toArray))
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
index 1994dacfc4df..9bf865d89f92 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala
@@ -65,4 +65,21 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext{
       session2.sql("select myadd(1, 2)").explain()
     }
   }
+
+  test("SPARK-13390: createDataFrame(java.util.List[_],Class[_]) NotSerializableException") {
+    val rows = new java.util.ArrayList[IntJavaBean]()
+    rows.add(new IntJavaBean(1))
+    val sqlContext = SQLContext.getOrCreate(sc)
+    // Without the fix for SPARK-13390, this will throw NotSerializableException
+    sqlContext.createDataFrame(rows, classOf[IntJavaBean]).groupBy("int").count().collect()
+  }
+}
+
+class IntJavaBean(private var i: Int) extends Serializable {
+
+  def getInt(): Int = i
+
+  def setInt(i: Int): Unit = {
+    this.i = i
+  }
 }
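
Note on why switching rows.toSeq to rows.toArray appears to fix the NotSerializableException: on the Scala versions Spark supported at the time (2.10/2.11, and likewise 2.12), Iterator.toSeq returns a lazy Stream whose unevaluated tail still points back at the underlying iterator, so serializing the LocalRelation's rows also tries to serialize that iterator and whatever bean-introspection state it references, none of which is serializable. toArray drains the iterator immediately into a plain, serializable array. The sketch below illustrates that laziness outside of Spark; it is a minimal illustration, not Spark code, and the names LazyToSeqSketch, NonSerializableState, and serializable() are introduced here for the example. The "serializable = false" outcome assumes the Stream-backed toSeq of Scala 2.11/2.12; on 2.13, Iterator.toSeq is strict and the problem does not reproduce.

import java.io.{ByteArrayOutputStream, NotSerializableException, ObjectOutputStream}

object LazyToSeqSketch {

  // Hypothetical stand-in for non-serializable state closed over by an
  // iterator (in the Spark case, the java.beans introspection machinery).
  class NonSerializableState

  // Returns true if the given object survives Java serialization.
  def serializable(obj: AnyRef): Boolean =
    try {
      new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(obj)
      true
    } catch {
      case _: NotSerializableException => false
    }

  def main(args: Array[String]): Unit = {
    val state = new NonSerializableState

    // The element function closes over `state`, loosely mirroring how the
    // beansToRows iterator closes over BeanInfo.
    def rows = Iterator.tabulate(3)(i => { state.hashCode; i })

    // On Scala 2.11/2.12, toSeq on an Iterator yields a lazy Stream: only the
    // head is evaluated, and the tail keeps a reference to the iterator.
    val lazySeq: Seq[Int] = rows.toSeq
    // toArray materializes everything up front; nothing refers back to the iterator.
    val strictSeq: Seq[Int] = rows.toArray.toSeq

    println(s"lazy toSeq:    ${lazySeq.getClass.getSimpleName}, serializable = ${serializable(lazySeq)}")
    println(s"strict toArray: ${strictSeq.getClass.getSimpleName}, serializable = ${serializable(strictSeq)}")
  }
}

The regression test in the diff exercises the same path end to end: groupBy(...).count().collect() forces the LocalRelation rows to be shipped with tasks, which is where the unmaterialized Stream used to fail.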