2 files changed: +26 -7 lines

sql/core/src/test/scala/org/apache/spark/sql/execution

@@ -704,6 +704,23 @@ class PlannerSuite extends SharedSQLContext {
     df.queryExecution.executedPlan.execute()
   }
 
+  test("SPARK-25278: physical nodes should be different instances for same logical nodes") {
+    val range = Range(1, 1, 1, 1)
+    val df = Union(range, range)
+    val ranges = df.queryExecution.optimizedPlan.collect {
+      case r: Range => r
+    }
+    assert(ranges.length == 2)
+    // Ensure the two Range instances are equal according to their equals method
+    assert(ranges.head == ranges.last)
+    val execRanges = df.queryExecution.sparkPlan.collect {
+      case r: RangeExec => r
+    }
+    assert(execRanges.length == 2)
+    // Ensure the two RangeExec instances are different instances
+    assert(!execRanges.head.eq(execRanges.last))
+  }
+
   test("SPARK-24556: always rewrite output partitioning in ReusedExchangeExec " +
     "and InMemoryTableScanExec") {
     def checkOutputPartitioningRewrite(
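
Note on the assertions in the new test: Scala's == delegates to equals, which is structural for case classes such as the logical Range, while eq compares object identity. That is exactly the property being checked: two logical nodes that are equal must still be planned as two distinct physical instances. A minimal, self-contained sketch of the == vs. eq distinction, using an illustrative Point case class that is not part of this patch:

// Illustrative only; Point is a stand-in, not a Spark class.
case class Point(x: Int, y: Int)

object EqualityDemo {
  def main(args: Array[String]): Unit = {
    val a = Point(1, 2)
    val b = Point(1, 2)
    assert(a == b)    // structural equality: equals compares field values
    assert(!(a eq b)) // reference equality: a and b are distinct instances
  }
}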

@@ -499,13 +499,15 @@ class SQLMetricsSuite extends SparkFunSuite with SQLMetricsTestUtils with SharedSQLContext {
 
   test("SPARK-25278: output metrics are wrong for plans repeated in the query") {
     val name = "demo_view"
-    sql(s"CREATE OR REPLACE VIEW $name AS VALUES 1,2")
-    val view = spark.table(name)
-    val union = view.union(view)
-    testSparkPlanMetrics(union, 1, Map(
-      0L -> ("Union" -> Map()),
-      1L -> ("LocalTableScan" -> Map("number of output rows" -> 2L)),
-      2L -> ("LocalTableScan" -> Map("number of output rows" -> 2L))))
+    withView(name) {
+      sql(s"CREATE OR REPLACE VIEW $name AS VALUES 1,2")
+      val view = spark.table(name)
+      val union = view.union(view)
+      testSparkPlanMetrics(union, 1, Map(
+        0L -> ("Union" -> Map()),
+        1L -> ("LocalTableScan" -> Map("number of output rows" -> 2L)),
+        2L -> ("LocalTableScan" -> Map("number of output rows" -> 2L))))
+    }
   }
 
   test("writing data out metrics: parquet") {
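
The change above wraps the body of the metrics test in withView so that the temporary view demo_view is dropped even when an assertion fails. As a hedged sketch of what such a helper typically looks like (the real one is provided by Spark's test utilities and its signature may differ), the pattern is a try/finally around the test body:

import org.apache.spark.sql.SparkSession

object ViewTestHelper {
  // Sketch of a withView-style helper, assuming only a SparkSession is available;
  // it runs the body and then always drops the named views.
  def withView(spark: SparkSession)(viewNames: String*)(body: => Unit): Unit = {
    try {
      body
    } finally {
      viewNames.foreach(name => spark.sql(s"DROP VIEW IF EXISTS $name"))
    }
  }
}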