@@ -26,7 +26,7 @@ import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
 import org.apache.spark.sql.types.{StructField, StructType}
-import org.apache.spark.sql.{DataFrame, Row, SQLContext, SaveMode}
+import org.apache.spark.sql._
 
 /**
  * ::DeveloperApi::
@@ -338,9 +338,16 @@ abstract class FSBasedRelation private[sql](
   private var _partitionSpec: PartitionSpec = maybePartitionSpec.map { spec =>
     spec.copy(partitionColumns = spec.partitionColumns.asNullable)
   }.getOrElse {
-    discoverPartitions()
+    if (partitionDiscoverEnabled()) {
+      discoverPartitions()
+    } else {
+      PartitionSpec(StructType(Nil), Array.empty[Partition])
+    }
   }
 
+  private def partitionDiscoverEnabled() =
+    sqlContext.conf.getConf(SQLConf.PARTITION_DISCOVERY_ENABLED, "true").toBoolean
+
   private[sql] def partitionSpec: PartitionSpec = _partitionSpec
 
   /**
@@ -349,7 +356,9 @@ abstract class FSBasedRelation private[sql](
   def partitionColumns: StructType = partitionSpec.partitionColumns
 
   private[sql] def refresh(): Unit = {
-    _partitionSpec = discoverPartitions()
+    if (partitionDiscoverEnabled()) {
+      _partitionSpec = discoverPartitions()
+    }
   }
 
   private def discoverPartitions(): PartitionSpec = {
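
For context, a minimal sketch of how a user could turn the new flag off before loading data. The literal key "spark.sql.sources.partitionDiscovery.enabled" is an assumption; only the SQLConf.PARTITION_DISCOVERY_ENABLED constant appears in this diff. With the flag off, the relation starts from an empty PartitionSpec and refresh() leaves _partitionSpec untouched.

import org.apache.spark.sql.SQLContext

def disablePartitionDiscovery(sqlContext: SQLContext): Unit = {
  // Assumed conf key behind SQLConf.PARTITION_DISCOVERY_ENABLED; the key
  // string itself is not shown in the diff.
  sqlContext.setConf("spark.sql.sources.partitionDiscovery.enabled", "false")
}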