
Commit 3fbef60

Factored execute() method of physical commands to parent class Command
1 parent 5a0e16c commit 3fbef60
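The change itself is small: the one-line execute() that every physical command re-implemented now lives once in the parent trait Command, built on top of sideEffectResult, so each concrete command only has to say what its result rows are. Below is a minimal, self-contained Scala analogue of the pattern with simplified types and a hypothetical ShowSetting command; it is a sketch of the idea, not the actual Spark classes.

// Parent trait: supplies the shared execute()/executeCollect() plumbing once.
trait Command {
  // Concrete commands override this to produce their (side-effecting) result rows.
  protected def sideEffectResult: Seq[String] = Seq.empty

  // Previously each command duplicated these; after the refactoring they are inherited.
  def executeCollect(): Array[String] = sideEffectResult.toArray
  def execute(): Seq[String] = sideEffectResult
}

// A concrete command now only describes what it computes.
case class ShowSetting(key: String, value: String) extends Command {
  override protected lazy val sideEffectResult: Seq[String] = Seq(s"$key=$value")
}

object Demo {
  def main(args: Array[String]): Unit =
    println(ShowSetting("some.key", "42").executeCollect().mkString(", ")) // prints: some.key=42
}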

File tree

4 files changed: +2 -26 lines changed


sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala

Lines changed: 2 additions & 11 deletions
@@ -40,6 +40,8 @@ trait Command {
   protected[sql] lazy val sideEffectResult: Seq[Row] = Seq.empty[Row]
 
   override def executeCollect(): Array[Row] = sideEffectResult.toArray
+
+  override def execute(): RDD[Row] = sqlContext.sparkContext.parallelize(sideEffectResult, 1)
 }
 
 /**
@@ -93,8 +95,6 @@ case class SetCommand(
       throw new IllegalArgumentException()
   }
 
-  def execute(): RDD[Row] = context.sparkContext.parallelize(sideEffectResult, 1)
-
   override def otherCopyArgs = context :: Nil
 }
 
@@ -123,8 +123,6 @@ case class ExplainCommand(
       ("Error occurred during query planning: \n" + cause.getMessage).split("\n").map(Row(_))
   }
 
-  def execute(): RDD[Row] = context.sparkContext.parallelize(sideEffectResult, 1)
-
   override def otherCopyArgs = context :: Nil
 }
 
@@ -144,11 +142,6 @@ case class CacheCommand(tableName: String, doCache: Boolean)(@transient context:
     Seq.empty[Row]
   }
 
-  override def execute(): RDD[Row] = {
-    sideEffectResult
-    context.emptyResult
-  }
-
   override def output: Seq[Attribute] = Seq.empty
 }
 
@@ -164,6 +157,4 @@ case class DescribeCommand(child: SparkPlan, output: Seq[Attribute])(
     Row("# Registered as a temporary table", null, null) +:
       child.output.map(field => Row(field.name, field.dataType.toString, null))
   }
-
-  override def execute(): RDD[Row] = context.sparkContext.parallelize(sideEffectResult, 1)
 }
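Note that CacheCommand above (and the Hive-side AnalyzeTable and DropTable below) used to force sideEffectResult and then return an empty RDD, while the other commands parallelized the result. The shared implementation keeps the evaluate-exactly-once behaviour because sideEffectResult is a lazy val. A tiny standalone sketch of that property, with illustrative names only:

object LazyOnce {
  def main(args: Array[String]): Unit = {
    var evaluations = 0

    trait Command {
      // lazy: the body runs at most once, the first time the field is read.
      protected lazy val sideEffectResult: Seq[Int] = Seq.empty
      def executeCollect(): Array[Int] = sideEffectResult.toArray
      def execute(): Seq[Int] = sideEffectResult
    }

    val cmd = new Command {
      override protected lazy val sideEffectResult: Seq[Int] = { evaluations += 1; Seq(1) }
    }

    cmd.execute()        // triggers the side effect
    cmd.executeCollect() // reuses the cached value
    println(evaluations) // 1
  }
}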

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/DescribeHiveTableCommand.scala

Lines changed: 0 additions & 2 deletions
@@ -74,7 +74,5 @@ case class DescribeHiveTableCommand(
     }
   }
 
-  override def execute(): RDD[Row] = context.sparkContext.parallelize(sideEffectResult, 1)
-
   override def otherCopyArgs = context :: Nil
 }

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/NativeCommand.scala

Lines changed: 0 additions & 2 deletions
@@ -34,7 +34,5 @@ case class NativeCommand(
 
   override protected[sql] lazy val sideEffectResult: Seq[Row] = context.runSqlHive(sql).map(Row(_))
 
-  override def execute(): RDD[Row] = context.sparkContext.parallelize(sideEffectResult, 1)
-
   override def otherCopyArgs = context :: Nil
 }

sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala

Lines changed: 0 additions & 11 deletions
@@ -41,11 +41,6 @@ case class AnalyzeTable(tableName: String) extends LeafNode with Command {
     hiveContext.analyze(tableName)
     Seq.empty[Row]
   }
-
-  override def execute(): RDD[Row] = {
-    sideEffectResult
-    sparkContext.emptyRDD[Row]
-  }
 }
 
 /**
@@ -54,7 +49,6 @@ case class AnalyzeTable(tableName: String) extends LeafNode with Command {
  */
 @DeveloperApi
 case class DropTable(tableName: String, ifExists: Boolean) extends LeafNode with Command {
-
   def hiveContext = sqlContext.asInstanceOf[HiveContext]
 
   def output = Seq.empty
@@ -65,9 +59,4 @@ case class DropTable(tableName: String, ifExists: Boolean) extends LeafNode with
     hiveContext.catalog.unregisterTable(None, tableName)
     Seq.empty[Row]
   }
-
-  override def execute(): RDD[Row] = {
-    sideEffectResult
-    sparkContext.emptyRDD[Row]
-  }
 }
