diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala
index 8a8c3a404323..5ab62ce64d24 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/LocalTableScan.scala
@@ -28,7 +28,11 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
  */
 case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {
 
-  private lazy val rdd = sqlContext.sparkContext.parallelize(rows)
+  private lazy val rdd = {
+    val converted = rows.map(
+      r => CatalystTypeConverters.convertToCatalyst(r, schema).asInstanceOf[Row])
+    sqlContext.sparkContext.parallelize(converted)
+  }
 
   override def execute(): RDD[Row] = rdd