-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-32257][SQL] Reports explicit errors for invalid usage of SET/RESET command #29146
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
5f2516b
56da1b4
db8387a
cdc9057
7a79e28
e3e560c
2bb369a
14bcd33
c70b472
725f777
41d5b15
8fece5f
f0206c7
6d5c653
8123f2e
52d1143
72614fc
a2dc5f2
5a0b008
6d2221d
4822210
5a80e78
b0e9686
473353b
3065ed9
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -17,6 +17,9 @@ | |
|
|
||
| package org.apache.spark.sql.execution | ||
|
|
||
| import scala.collection.JavaConverters._ | ||
|
|
||
| import org.apache.spark.internal.config.ConfigEntry | ||
| import org.apache.spark.sql.SaveMode | ||
| import org.apache.spark.sql.catalyst.TableIdentifier | ||
| import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAlias, UnresolvedAttribute, UnresolvedRelation, UnresolvedStar} | ||
|
|
@@ -25,7 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{Ascending, Concat, SortOrder} | |
| import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, RepartitionByExpression, Sort} | ||
| import org.apache.spark.sql.execution.command._ | ||
| import org.apache.spark.sql.execution.datasources.{CreateTable, RefreshResource} | ||
| import org.apache.spark.sql.internal.{HiveSerDe, SQLConf} | ||
| import org.apache.spark.sql.internal.{HiveSerDe, SQLConf, StaticSQLConf} | ||
| import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType} | ||
|
|
||
| /** | ||
|
|
@@ -61,6 +64,82 @@ class SparkSqlParserSuite extends AnalysisTest { | |
| private def intercept(sqlCommand: String, messages: String*): Unit = | ||
| interceptParseException(parser.parsePlan)(sqlCommand, messages: _*) | ||
|
|
||
| test("Checks if SET/RESET can parse all the configurations") { | ||
| // Force to build static SQL configurations | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. How about Spark conf? Check this PR: https://github.com/apache/spark/pull/23031/files
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Added tests for the Spark confs, too. |
||
| StaticSQLConf | ||
| ConfigEntry.knownConfigs.values.asScala.foreach { config => | ||
| assertEqual(s"SET ${config.key}", SetCommand(Some(config.key -> None))) | ||
| if (config.defaultValue.isDefined && config.defaultValueString != null) { | ||
| assertEqual(s"SET ${config.key}=${config.defaultValueString}", | ||
| SetCommand(Some(config.key -> Some(config.defaultValueString)))) | ||
| } | ||
| assertEqual(s"RESET ${config.key}", ResetCommand(Some(config.key))) | ||
| } | ||
| } | ||
|
|
||
| test("Report Error for invalid usage of SET command") { | ||
| assertEqual("SET", SetCommand(None)) | ||
| assertEqual("SET -v", SetCommand(Some("-v", None))) | ||
| assertEqual("SET spark.sql.key", SetCommand(Some("spark.sql.key" -> None))) | ||
| assertEqual("SET spark.sql.key ", SetCommand(Some("spark.sql.key" -> None))) | ||
| assertEqual("SET spark:sql:key=false", SetCommand(Some("spark:sql:key" -> Some("false")))) | ||
| assertEqual("SET spark:sql:key=", SetCommand(Some("spark:sql:key" -> Some("")))) | ||
| assertEqual("SET spark:sql:key= ", SetCommand(Some("spark:sql:key" -> Some("")))) | ||
| assertEqual("SET spark:sql:key=-1 ", SetCommand(Some("spark:sql:key" -> Some("-1")))) | ||
| assertEqual("SET spark:sql:key = -1", SetCommand(Some("spark:sql:key" -> Some("-1")))) | ||
| assertEqual("SET 1.2.key=value", SetCommand(Some("1.2.key" -> Some("value")))) | ||
| assertEqual("SET spark.sql.3=4", SetCommand(Some("spark.sql.3" -> Some("4")))) | ||
| assertEqual("SET 1:2:key=value", SetCommand(Some("1:2:key" -> Some("value")))) | ||
| assertEqual("SET spark:sql:3=4", SetCommand(Some("spark:sql:3" -> Some("4")))) | ||
| assertEqual("SET 5=6", SetCommand(Some("5" -> Some("6")))) | ||
| assertEqual("SET spark:sql:key = va l u e ", | ||
| SetCommand(Some("spark:sql:key" -> Some("va l u e")))) | ||
| assertEqual("SET `spark.sql. key`=value", | ||
| SetCommand(Some("spark.sql. key" -> Some("value")))) | ||
| assertEqual("SET `spark.sql. key`= v a lu e ", | ||
| SetCommand(Some("spark.sql. key" -> Some("v a lu e")))) | ||
| assertEqual("SET `spark.sql. key`= -1", | ||
| SetCommand(Some("spark.sql. key" -> Some("-1")))) | ||
|
|
||
| val expectedErrMsg = "Expected format is 'SET', 'SET key', or " + | ||
| "'SET key=value'. If you want to include special characters in key, " + | ||
| "please use quotes, e.g., SET `ke y`=value." | ||
| intercept("SET spark.sql.key value", expectedErrMsg) | ||
| intercept("SET spark.sql.key 'value'", expectedErrMsg) | ||
| intercept("SET spark.sql.key \"value\" ", expectedErrMsg) | ||
| intercept("SET spark.sql.key value1 value2", expectedErrMsg) | ||
| intercept("SET spark. sql.key=value", expectedErrMsg) | ||
| intercept("SET spark :sql:key=value", expectedErrMsg) | ||
| intercept("SET spark . sql.key=value", expectedErrMsg) | ||
| intercept("SET spark.sql. key=value", expectedErrMsg) | ||
| intercept("SET spark.sql :key=value", expectedErrMsg) | ||
| intercept("SET spark.sql . key=value", expectedErrMsg) | ||
| } | ||
|
|
||
| test("Report Error for invalid usage of RESET command") { | ||
| assertEqual("RESET", ResetCommand(None)) | ||
| assertEqual("RESET spark.sql.key", ResetCommand(Some("spark.sql.key"))) | ||
| assertEqual("RESET spark.sql.key ", ResetCommand(Some("spark.sql.key"))) | ||
| assertEqual("RESET 1.2.key ", ResetCommand(Some("1.2.key"))) | ||
| assertEqual("RESET spark.sql.3", ResetCommand(Some("spark.sql.3"))) | ||
| assertEqual("RESET 1:2:key ", ResetCommand(Some("1:2:key"))) | ||
| assertEqual("RESET spark:sql:3", ResetCommand(Some("spark:sql:3"))) | ||
| assertEqual("RESET `spark.sql. key`", ResetCommand(Some("spark.sql. key"))) | ||
|
|
||
| val expectedErrMsg = "Expected format is 'RESET' or 'RESET key'. " + | ||
| "If you want to include special characters in key, " + | ||
| "please use quotes, e.g., RESET `ke y`." | ||
| intercept("RESET spark.sql.key1 key2", expectedErrMsg) | ||
| intercept("RESET spark. sql.key1 key2", expectedErrMsg) | ||
| intercept("RESET spark.sql.key1 key2 key3", expectedErrMsg) | ||
| intercept("RESET spark: sql:key", expectedErrMsg) | ||
| intercept("RESET spark .sql.key", expectedErrMsg) | ||
| intercept("RESET spark : sql:key", expectedErrMsg) | ||
| intercept("RESET spark.sql: key", expectedErrMsg) | ||
| intercept("RESET spark.sql .key", expectedErrMsg) | ||
| intercept("RESET spark.sql : key", expectedErrMsg) | ||
| } | ||
|
|
||
| test("refresh resource") { | ||
| assertEqual("REFRESH prefix_path", RefreshResource("prefix_path")) | ||
| assertEqual("REFRESH /", RefreshResource("/")) | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -115,7 +115,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { | |
| sql(s"set ${SQLConf.Deprecated.MAPRED_REDUCE_TASKS}=10") | ||
| assert(spark.conf.get(SQLConf.SHUFFLE_PARTITIONS) === 10) | ||
| } finally { | ||
| sql(s"set ${SQLConf.SHUFFLE_PARTITIONS}=$original") | ||
| sql(s"set ${SQLConf.SHUFFLE_PARTITIONS.key}=$original") | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The existing code looks incorrect. |
||
| } | ||
| } | ||
|
|
||
|
|
@@ -146,7 +146,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { | |
| assert(spark.conf.get(SQLConf.GROUP_BY_ORDINAL)) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.GROUP_BY_ORDINAL.key}'").count() == 0) | ||
| } finally { | ||
| sql(s"set ${SQLConf.GROUP_BY_ORDINAL}=$original") | ||
| sql(s"set ${SQLConf.GROUP_BY_ORDINAL.key}=$original") | ||
| } | ||
| } | ||
|
|
||
|
|
@@ -162,7 +162,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { | |
| assert(spark.conf.get(SQLConf.OPTIMIZER_MAX_ITERATIONS) === 100) | ||
| assert(sql(s"set").where(s"key = '${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}'").count() == 0) | ||
| } finally { | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS}=$original") | ||
| sql(s"set ${SQLConf.OPTIMIZER_MAX_ITERATIONS.key}=$original") | ||
| } | ||
| } | ||
|
|
||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.