@@ -46,6 +46,8 @@ import scala.collection.JavaConversions._
 private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: HiveContext)
   extends Catalog with Logging {
 
+  val conf = hive.conf
+
   /** Usages should lock on `this`. */
   protected[hive] lazy val hiveWarehouse = new Warehouse(hive.hiveconf)
 
@@ -138,7 +140,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
     val tableProperties = new scala.collection.mutable.HashMap[String, String]
     tableProperties.put("spark.sql.sources.provider", provider)
     if (userSpecifiedSchema.isDefined) {
-      val threshold = hive.conf.schemaStringLengthThreshold
+      val threshold = conf.schemaStringLengthThreshold
       val schemaJsonString = userSpecifiedSchema.get.json
       // Split the JSON string.
       val parts = schemaJsonString.grouped(threshold).toSeq
@@ -345,7 +347,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
         // Inserting into partitioned table is not supported in Parquet data source (yet).
         if !relation.hiveQlTable.isPartitioned &&
           hive.convertMetastoreParquet &&
-          hive.conf.parquetUseDataSourceApi &&
+          conf.parquetUseDataSourceApi &&
           relation.tableDesc.getSerdeClassName.toLowerCase.contains("parquet") =>
         val parquetRelation = convertToParquetRelation(relation)
         val attributedRewrites = relation.output.zip(parquetRelation.output)
@@ -356,7 +358,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
         // Inserting into partitioned table is not supported in Parquet data source (yet).
         if !relation.hiveQlTable.isPartitioned &&
           hive.convertMetastoreParquet &&
-          hive.conf.parquetUseDataSourceApi &&
+          conf.parquetUseDataSourceApi &&
           relation.tableDesc.getSerdeClassName.toLowerCase.contains("parquet") =>
         val parquetRelation = convertToParquetRelation(relation)
         val attributedRewrites = relation.output.zip(parquetRelation.output)
@@ -365,7 +367,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
       // Read path
       case p @ PhysicalOperation(_, _, relation: MetastoreRelation)
         if hive.convertMetastoreParquet &&
-          hive.conf.parquetUseDataSourceApi &&
+          conf.parquetUseDataSourceApi &&
           relation.tableDesc.getSerdeClassName.toLowerCase.contains("parquet") =>
         val parquetRelation = convertToParquetRelation(relation)
         val attributedRewrites = relation.output.zip(parquetRelation.output)
@@ -425,7 +427,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
         val mode = if (allowExisting) SaveMode.Ignore else SaveMode.ErrorIfExists
         CreateTableUsingAsSelect(
           desc.name,
-          hive.conf.defaultDataSourceName,
+          conf.defaultDataSourceName,
          temporary = false,
          mode,
          options = Map.empty[String, String],
@@ -454,7 +456,7 @@ private[hive] class HiveMetastoreCatalog(val client: ClientInterface, hive: Hive
         val mode = if (allowExisting) SaveMode.Ignore else SaveMode.ErrorIfExists
         CreateTableUsingAsSelect(
           tblName,
-          hive.conf.defaultDataSourceName,
+          conf.defaultDataSourceName,
          temporary = false,
          mode,
          options = Map.empty[String, String],
0 commit comments