Skip to content

Commit 3b0c71b

Browse files
Remove the parser rule for SET commands. A few other fixes.
1 parent d0c4578 commit 3b0c71b

File tree

6 files changed

+19
-21
lines changed

6 files changed

+19
-21
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -41,10 +41,18 @@ import org.apache.spark.sql.catalyst.types._
4141
* for a SQL-like language should check out the HiveQL support in the sql/hive sub-project.
4242
*/
4343
class SqlParser extends StandardTokenParsers with PackratParsers {
44+
4445
def apply(input: String): LogicalPlan = {
45-
phrase(query)(new lexical.Scanner(input)) match {
46-
case Success(r, x) => r
47-
case x => sys.error(x.toString)
46+
// Special-case out set commands since the value fields can be
47+
// complex to handle without RegexParsers.
48+
if (input.toLowerCase.startsWith("set")) {
49+
val kvPair = input.drop(3).split("=")
50+
SetCommand(kvPair(0).trim, if (kvPair.size > 1) kvPair(1).trim else "")
51+
} else {
52+
phrase(query)(new lexical.Scanner(input)) match {
53+
case Success(r, x) => r
54+
case x => sys.error(x.toString)
55+
}
4856
}
4957
}
5058

@@ -170,22 +178,13 @@ class SqlParser extends StandardTokenParsers with PackratParsers {
170178
}
171179

172180
protected lazy val query: Parser[LogicalPlan] = (
173-
setCommand
174-
| select * (
181+
select * (
175182
UNION ~ ALL ^^^ { (q1: LogicalPlan, q2: LogicalPlan) => Union(q1, q2) } |
176183
UNION ~ opt(DISTINCT) ^^^ { (q1: LogicalPlan, q2: LogicalPlan) => Distinct(Union(q1, q2)) }
177184
)
178185
| insert
179186
)
180187

181-
protected lazy val setCommand: Parser[LogicalPlan] = {
182-
// Comma needed for values such as "table1,table2".
183-
val keyVal = ident | numericLit | stringLit
184-
(SET ~> keyVal <~ "=") ~ rep1sep(keyVal, ",") <~ opt(";") ^^ {
185-
case key ~ value => SetCommand(key, value.mkString(","))
186-
}
187-
}
188-
189188
protected lazy val select: Parser[LogicalPlan] =
190189
SELECT ~> opt(DISTINCT) ~ projections ~
191190
opt(from) ~ opt(filter) ~

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -113,9 +113,7 @@ case class NativeCommand(cmd: String) extends Command
113113
/**
114114
* Commands of the form "set key=value".
115115
*/
116-
case class SetCommand(key: String, value: String) extends Command {
117-
override def toString = s"<command> set $key=$value"
118-
}
116+
case class SetCommand(key: String, value: String) extends Command
119117

120118
/**
121119
* Returned by a parser when the users only wants to see what query plan would be executed, without

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -282,7 +282,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
282282
logical match {
283283
case SetCommand(key, value) =>
284284
sqlConf.set(key, value)
285-
emptyResult
285+
emptyResult // TODO: should this return something else?
286286
case _ => executedPlan.execute()
287287
}
288288
}

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
2929
self: SQLContext#SparkPlanner =>
3030

3131
object HashJoin extends Strategy with PredicateHelper {
32-
3332
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
3433
// Find inner joins where at least some predicates can be evaluated by matching hash keys
3534
// using the HashFilteredJoin pattern.

sql/core/src/test/scala/org/apache/spark/sql/SQLConfSuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ class SQLConfSuite extends QueryTest {
4949
conf.clear()
5050
}
5151

52-
test("SQLConf picks up SQL set commands") {
52+
test("parse SQL set commands") {
5353
sql(s"set $testKey=$testVal")
5454
assert(conf.get(testKey, testVal + "_") == testVal)
5555
assert(TestSQLContext.sqlConf.get(testKey, testVal + "_") == testVal)
@@ -59,12 +59,14 @@ class SQLConfSuite extends QueryTest {
5959
sql("set mapred.reduce.tasks = 40")
6060
assert(conf.get("mapred.reduce.tasks", "0") == "40")
6161

62-
6362
val key = "spark.sql.key"
6463
val vs = "val0,val_1,val2.3,my_table"
6564
sql(s"set $key=$vs")
6665
assert(conf.get(key, "0") == vs)
6766

67+
sql(s"set$key=")
68+
assert(conf.get(key, "0") == "")
69+
6870
conf.clear()
6971
}
7072

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -209,7 +209,7 @@ private[hive] object HiveQl {
209209
try {
210210
if (sql.toLowerCase.startsWith("set")) {
211211
val kvPair = sql.drop(3).split("=")
212-
SetCommand(kvPair(0).trim, kvPair(1).trim)
212+
SetCommand(kvPair(0).trim, if (kvPair.size > 1) kvPair(1).trim else "")
213213
} else if (sql.toLowerCase.startsWith("add jar")) {
214214
AddJar(sql.drop(8))
215215
} else if (sql.toLowerCase.startsWith("add file")) {

0 commit comments

Comments
 (0)