1 file changed: +8 −8 lines changed
sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala
@@ -91,7 +91,10 @@ private[sql] object DataSourceStrategy extends Strategy with Logging {
91  91      val inputPaths = t.paths.map(new Path(_)).flatMap { path =>
92  92        val fs = path.getFileSystem(t.sqlContext.sparkContext.hadoopConfiguration)
93  93        val qualifiedPath = fs.makeQualified(path)
94      -     SparkHadoopUtil.get.listLeafStatuses(fs, qualifiedPath).map(_.getPath.toString)
    94  +     SparkHadoopUtil.get.listLeafStatuses(fs, qualifiedPath).map(_.getPath).filterNot { path =>
    95  +       val name = path.getName
    96  +       name.startsWith("_") || name.startsWith(".")
    97  +     }.map(fs.makeQualified(_).toString)
95  98      }
96  99
97  100     pruneFilterProject(
@@ -131,13 +134,10 @@ private[sql] object DataSourceStrategy extends Strategy with Logging {
131 134     val dataFilePaths = {
132 135       val dirPath = new Path(dir)
133 136       val fs = dirPath.getFileSystem(SparkHadoopUtil.get.conf)
134     -     fs.listStatus(dirPath)
135     -       .map(_.getPath)
136     -       .filter { path =>
137     -         val name = path.getName
138     -         name.startsWith("_") || name.startsWith(".")
139     -       }
140     -       .map(fs.makeQualified(_).toString)
    137 +     fs.listStatus(dirPath).map(_.getPath).filterNot { path =>
    138 +       val name = path.getName
    139 +       name.startsWith("_") || name.startsWith(".")
    140 +     }.map(fs.makeQualified(_).toString)
141 141     }
142 142

143 143     // The table scan operator (PhysicalRDD) which retrieves required columns from data files.
You can’t perform that action at this time.
0 commit comments