@@ -21,7 +21,7 @@ import java.io.IOException
 import java.lang.{Long => JLong}
 import java.text.SimpleDateFormat
 import java.util.concurrent.{Callable, TimeUnit}
-import java.util.{ArrayList, Date, List => JList}
+import java.util.{ArrayList, Collections, Date, List => JList}

 import scala.collection.JavaConversions._
 import scala.collection.mutable
@@ -336,7 +336,9 @@ private[parquet] class FilteringParquetRowInputFormat
     val cacheMetadata = conf.getBoolean(SQLConf.PARQUET_CACHE_METADATA, false)
     val statuses = listStatus(jobContext)
     fileStatuses = statuses.map(file => file.getPath -> file).toMap
-    if (!cacheMetadata) {
+    if (statuses.isEmpty) {
+      footers = Collections.emptyList[Footer]
+    } else if (!cacheMetadata) {
       // Read the footers from HDFS
       footers = getFooters(conf, statuses)
     } else {
@@ -439,7 +441,7 @@ private[parquet] object FilteringParquetRowInputFormat {

   private val blockLocationCache = CacheBuilder.newBuilder()
     .maximumSize(20000)
-    .expireAfterWrite(15, TimeUnit.MINUTES) // Expire locations since HDFS nodes might fail
+    .expireAfterWrite(15, TimeUnit.MINUTES) // Expire locations since HDFS files might move
     .build[FileStatus, Array[BlockLocation]]()
 }
445447
0 commit comments