File tree Expand file tree Collapse file tree 2 files changed +4
-6
lines changed
core/src/main/scala/org/apache/spark/sql
hive/src/main/scala/org/apache/spark/sql/hive Expand file tree Collapse file tree 2 files changed +4
-6
lines changed Original file line number Diff line number Diff line change 1717
18 18 package org.apache.spark.sql
1919
20- import org.apache.spark.sql.catalyst.CatalystConf
20+ import java.util.Properties
2121
22 22 import scala.collection.immutable
23 23 import scala.collection.JavaConversions._
2424
25- import java.util.Properties
25+ import org.apache.spark.sql.catalyst.CatalystConf
2626
27 27 private[spark] object SQLConf {
28 28   val COMPRESS_CACHED = "spark.sql.inMemoryColumnarStorage.compressed"
@@ -266,3 +266,4 @@ private[sql] class SQLConf extends Serializable with CatalystConf {
266 266     settings.clear()
267 267   }
268 268 }
269+
Original file line number Diff line number Diff line change @@ -50,10 +50,6 @@ import org.apache.spark.sql.types._
50 50 class HiveContext(sc: SparkContext) extends SQLContext(sc) {
51 51   self =>
52 52
53- protected[sql] override lazy val conf: SQLConf = new SQLConf {
54-   override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
55- }
56-
57 53   /**
58 54    * When true, enables an experimental feature where metastore tables that use the parquet SerDe
59 55    * are automatically converted to use the Spark SQL parquet table scan, instead of the Hive
@@ -266,6 +262,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
266 262   protected[hive] class SQLSession extends super.SQLSession {
267 263     protected[sql] override lazy val conf: SQLConf = new SQLConf {
268 264       override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
    265+      setConf(CatalystConf.CASE_SENSITIVE, "false")
269 266     }
270267
271 268   protected[hive] lazy val hiveconf: HiveConf = {
You can’t perform that action at this time.
0 commit comments