From c0e08b84c5d600789fbafa38692669022ae6973d Mon Sep 17 00:00:00 2001
From: Eric Liang
Date: Tue, 27 Sep 2016 19:18:20 -0700
Subject: [PATCH 1/3] wip

---
 .../spark/sql/execution/datasources/DataSourceStrategy.scala | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
index 63f01c5bb9e3..e792104b12f7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
@@ -340,6 +340,8 @@ object DataSourceStrategy extends Strategy with Logging {
     // `Filter`s or cannot be handled by `relation`.
     val filterCondition = unhandledPredicates.reduceLeftOption(expressions.And)
 
+    // These metadata values make scan plans uniquely identifiable for equality checking.
+    // TODO(ekl) using strings for equality checking is brittle
     val metadata: Map[String, String] = {
       val pairs = ArrayBuffer.empty[(String, String)]
 
@@ -350,6 +352,8 @@ object DataSourceStrategy extends Strategy with Logging {
         }
         pairs += ("PushedFilters" -> markedFilters.mkString("[", ", ", "]"))
       }
+      pairs += ("ReadSchema" ->
+        StructType.fromAttributes(projects.map(_.toAttribute)).catalogString)
       pairs.toMap
     }
 

From f0df3a005be301ac627beb53a233ece06269ced8 Mon Sep 17 00:00:00 2001
From: Eric Liang
Date: Tue, 27 Sep 2016 20:11:54 -0700
Subject: [PATCH 2/3] Tue Sep 27 20:11:54 PDT 2016

---
 .../test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 10f15ca28068..c94cb3b69dfb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -791,4 +791,12 @@ class JDBCSuite extends SparkFunSuite
     val schema = JdbcUtils.schemaString(df, "jdbc:mysql://localhost:3306/temp")
     assert(schema.contains("`order` TEXT"))
   }
+
+  test("SPARK-17673: Exchange reuse respects differences in output schema") {
+    val df = sql("SELECT * FROM inttypes WHERE a IS NOT NULL")
+    val df1 = df.groupBy("a").agg("c" -> "min")
+    val df2 = df.groupBy("a").agg("d" -> "min")
+    val res = df1.union(df2)
+    assert(res.distinct().count() == 2) // would be 1 if the exchange was incorrectly reused
+  }
 }

From e2dfb25aeef021788208a0f05420d211916d2db5 Mon Sep 17 00:00:00 2001
From: Eric Liang
Date: Tue, 27 Sep 2016 20:18:47 -0700
Subject: [PATCH 3/3] Tue Sep 27 20:18:47 PDT 2016

---
 .../spark/sql/execution/datasources/DataSourceStrategy.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
index e792104b12f7..693b4c4d0e5e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
@@ -341,7 +341,7 @@ object DataSourceStrategy extends Strategy with Logging {
     val filterCondition = unhandledPredicates.reduceLeftOption(expressions.And)
 
     // These metadata values make scan plans uniquely identifiable for equality checking.
-    // TODO(ekl) using strings for equality checking is brittle
+    // TODO(SPARK-17701) using strings for equality checking is brittle
     val metadata: Map[String, String] = {
       val pairs = ArrayBuffer.empty[(String, String)]
 
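A minimal standalone sketch of the query shape the new JDBCSuite test guards against, not part of the patch itself: it substitutes an in-memory temp view for the suite's JDBC "inttypes" table, so it may not hit the exact data-source scan path the fix changes; the object name, table name, column names, and data are illustrative assumptions.

import org.apache.spark.sql.SparkSession

object ExchangeReuseSchemaSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("SPARK-17673 sketch")
      .getOrCreate()
    import spark.implicits._

    // One base relation; the two aggregates below read different columns from it.
    Seq((1, 10, 100), (1, 20, 200)).toDF("a", "c", "d")
      .createOrReplaceTempView("inttypes_like")
    val base = spark.sql("SELECT * FROM inttypes_like WHERE a IS NOT NULL")

    val byC = base.groupBy("a").agg("c" -> "min") // output columns: a, min(c)
    val byD = base.groupBy("a").agg("d" -> "min") // output columns: a, min(d)

    // The two union branches are identical except for which column they aggregate,
    // i.e. except for their output schema. If one branch's exchange were wrongly
    // substituted for the other's, both sides would return the same rows and the
    // distinct count would collapse to 1 instead of 2.
    val res = byC.union(byD)
    assert(res.distinct().count() == 2)

    spark.stop()
  }
}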