From b4055e11b25c0c7cd83f34097425391abd5fe9b4 Mon Sep 17 00:00:00 2001
From: vinodkc
Date: Sat, 5 Aug 2017 12:09:04 +0530
Subject: [PATCH 1/2] SQLContext.getConf(key, null) should return null

---
 .../scala/org/apache/spark/sql/internal/SQLConf.scala       | 10 ++++++----
 .../org/apache/spark/sql/internal/SQLConfSuite.scala        |  6 ++++++
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index a819cddcae98..ecb941c5fa9e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1228,10 +1228,12 @@ class SQLConf extends Serializable with Logging {
    * not set yet, return `defaultValue`.
    */
   def getConfString(key: String, defaultValue: String): String = {
-    val entry = sqlConfEntries.get(key)
-    if (entry != null && defaultValue != "") {
-      // Only verify configs in the SQLConf object
-      entry.valueConverter(defaultValue)
+    if (defaultValue != null && defaultValue != "") {
+      val entry = sqlConfEntries.get(key)
+      if (entry != null) {
+        // Only verify configs in the SQLConf object
+        entry.valueConverter(defaultValue)
+      }
     }
     Option(settings.get(key)).getOrElse(defaultValue)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index a283ff971adc..878d072691da 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -270,4 +270,10 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     val e2 = intercept[AnalysisException](spark.conf.unset(SCHEMA_STRING_LENGTH_THRESHOLD.key))
     assert(e2.message.contains("Cannot modify the value of a static config"))
   }
+
+  test("SPARK-21588 SQLContext.getConf(key, null) should return null") {
+    assert(null == spark.conf.get("spark.sql.thriftServer.incrementalCollect", null))
+    assert("" == spark.conf.get(
+      "spark.sql.thriftServer.incrementalCollect", ""))
+  }
 }

From 4295cd3342178285c2465c080cafa8dd8b68ba16 Mon Sep 17 00:00:00 2001
From: vinodkc
Date: Sun, 6 Aug 2017 08:50:34 +0530
Subject: [PATCH 2/2] testcase updated

---
 .../org/apache/spark/sql/internal/SQLConfSuite.scala        | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 878d072691da..948f179f5e8f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -272,8 +272,13 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-21588 SQLContext.getConf(key, null) should return null") {
-    assert(null == spark.conf.get("spark.sql.thriftServer.incrementalCollect", null))
-    assert("" == spark.conf.get(
-      "spark.sql.thriftServer.incrementalCollect", ""))
+    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, null))
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, ""))
+    }
+
+    assert(spark.conf.getOption("spark.sql.nonexistent").isEmpty)
+    assert(null == spark.conf.get("spark.sql.nonexistent", null))
+    assert("" == spark.conf.get("spark.sql.nonexistent", ""))
   }
 }