Skip to content

Commit fa4509f

Browse files
committed
rename coalesce
1 parent 2c349b5 commit fa4509f

File tree

6 files changed

+11
-12
lines changed

6 files changed

+11
-12
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -308,8 +308,7 @@ class SqlParser extends AbstractSparkSQLParser with DataTypeParser {
308308
{ case s ~ p => Substring(s, p, Literal(Integer.MAX_VALUE)) }
309309
| (SUBSTR | SUBSTRING) ~ "(" ~> expression ~ ("," ~> expression) ~ ("," ~> expression) <~ ")" ^^
310310
{ case s ~ p ~ l => Substring(s, p, l) }
311-
| COALESCE ~ "(" ~> repsep(expression, ",") <~ ")" ^^ { case exprs =>
312-
expressions.Coalesce(exprs) }
311+
| COALESCE ~ "(" ~> repsep(expression, ",") <~ ")" ^^ { case exprs => Coalesce(exprs) }
313312
| SQRT ~ "(" ~> expression <~ ")" ^^ { case exp => Sqrt(exp) }
314313
| ABS ~ "(" ~> expression <~ ")" ^^ { case exp => Abs(exp) }
315314
| ident ~ ("(" ~> repsep(expression, ",")) <~ ")" ^^

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,6 @@ package org.apache.spark.sql.catalyst.optimizer
1919

2020
import scala.collection.immutable.HashSet
2121
import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
22-
import org.apache.spark.sql.catalyst.expressions
2322
import org.apache.spark.sql.catalyst.expressions._
2423
import org.apache.spark.sql.catalyst.plans.Inner
2524
import org.apache.spark.sql.catalyst.plans.FullOuter
@@ -235,7 +234,7 @@ object NullPropagation extends Rule[LogicalPlan] {
235234
case e @ Count(expr) if !expr.nullable => Count(Literal(1))
236235

237236
// For Coalesce, remove null literals.
238-
case e @ expressions.Coalesce(children) =>
237+
case e @ Coalesce(children) =>
239238
val newChildren = children.filter {
240239
case Literal(null, _) => false
241240
case _ => true
@@ -245,7 +244,7 @@ object NullPropagation extends Rule[LogicalPlan] {
245244
} else if (newChildren.length == 1) {
246245
newChildren(0)
247246
} else {
248-
expressions.Coalesce(newChildren)
247+
Coalesce(newChildren)
249248
}
250249

251250
case e @ Substring(Literal(null, _), _, _) => Literal.create(null, e.dataType)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -310,7 +310,8 @@ case class Distinct(child: LogicalPlan) extends UnaryNode {
310310
override def output: Seq[Attribute] = child.output
311311
}
312312

313-
case class Coalesce(numPartitions: Int, shuffle: Boolean, child: LogicalPlan) extends UnaryNode {
313+
case class CoalescePartitions(numPartitions: Int, shuffle: Boolean, child: LogicalPlan)
314+
extends UnaryNode {
314315
override def output: Seq[Attribute] = child.output
315316
}
316317

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,6 @@ import org.apache.spark.sql.catalyst.{CatalystTypeConverters, ScalaReflection, S
3737
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedRelation, ResolvedStar}
3838
import org.apache.spark.sql.catalyst.expressions._
3939
import org.apache.spark.sql.catalyst.plans.{JoinType, Inner}
40-
import org.apache.spark.sql.catalyst.plans.logical
4140
import org.apache.spark.sql.catalyst.plans.logical._
4241
import org.apache.spark.sql.execution.{EvaluatePython, ExplainCommand, LogicalRDD}
4342
import org.apache.spark.sql.jdbc.JDBCWriteDetails
@@ -962,7 +961,7 @@ class DataFrame private[sql](
962961
* @group rdd
963962
*/
964963
override def repartition(numPartitions: Int): DataFrame = {
965-
logical.Coalesce(numPartitions, shuffle = true, logicalPlan)
964+
CoalescePartitions(numPartitions, shuffle = true, logicalPlan)
966965
}
967966

968967
/**
@@ -973,7 +972,7 @@ class DataFrame private[sql](
973972
* @group rdd
974973
*/
975974
override def coalesce(numPartitions: Int): DataFrame = {
976-
logical.Coalesce(numPartitions, shuffle = false, logicalPlan)
975+
CoalescePartitions(numPartitions, shuffle = false, logicalPlan)
977976
}
978977

979978
/**

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -283,8 +283,8 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
283283
case logical.Distinct(child) =>
284284
execution.Distinct(partial = false,
285285
execution.Distinct(partial = true, planLater(child))) :: Nil
286-
case logical.Coalesce(numPartitions, shuffle, child) =>
287-
execution.Coalesce(numPartitions, shuffle, planLater(child)) :: Nil
286+
case logical.CoalescePartitions(numPartitions, shuffle, child) =>
287+
execution.CoalescePartitions(numPartitions, shuffle, planLater(child)) :: Nil
288288
case logical.SortPartitions(sortExprs, child) =>
289289
// This sort only sorts tuples within a partition. Its requiredDistribution will be
290290
// an UnspecifiedDistribution.

sql/core/src/main/scala/org/apache/spark/sql/execution/basicOperators.scala

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -250,7 +250,8 @@ case class Distinct(partial: Boolean, child: SparkPlan) extends UnaryNode {
250250
* Return a new RDD that has exactly `numPartitions` partitions.
251251
*/
252252
@DeveloperApi
253-
case class Coalesce(numPartitions: Int, shuffle: Boolean, child: SparkPlan) extends UnaryNode {
253+
case class CoalescePartitions(numPartitions: Int, shuffle: Boolean, child: SparkPlan)
254+
extends UnaryNode {
254255
override def output: Seq[Attribute] = child.output
255256

256257
override def execute(): RDD[Row] = {

0 commit comments

Comments (0)