diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index d45ceca16714..def99c820af8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -105,11 +105,11 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
   private val inputFilePath = new File(baseResourcePath, "inputs").getAbsolutePath
   private val goldenFilePath = new File(baseResourcePath, "results").getAbsolutePath
 
+  private val validFileExtensions = ".sql"
+
   /** List of test cases to ignore, in lower cases. */
   private val blackList = Set(
-    "blacklist.sql",  // Do NOT remove this one. It is here to test the blacklist functionality.
-    ".DS_Store"       // A meta-file that may be created on Mac by Finder App.
-                      // We should ignore this file from processing.
+    "blacklist.sql"   // Do NOT remove this one. It is here to test the blacklist functionality.
   )
 
   // Create all the test cases.
@@ -329,7 +329,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext {
   /** Returns all the files (not directories) in a directory, recursively. */
   private def listFilesRecursively(path: File): Seq[File] = {
     val (dirs, files) = path.listFiles().partition(_.isDirectory)
-    files ++ dirs.flatMap(listFilesRecursively)
+    // Filter out test files with invalid extensions such as temp files created
+    // by vi (.swp), Mac (.DS_Store) etc.
+    val filteredFiles = files.filter(_.getName.endsWith(validFileExtensions))
+    filteredFiles ++ dirs.flatMap(listFilesRecursively)
   }
 
   /** Load built-in test tables into the SparkSession. */
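
The sketch below is not part of the patch; it is a minimal, standalone rendering of the new filtering behaviour in `listFilesRecursively`, assuming a hypothetical temp directory populated with one valid `.sql` file plus the kind of editor/OS clutter the change is meant to skip. The object name `ListFilesSketch` and the sample file names are illustrative only.

```scala
import java.io.File
import java.nio.file.Files

object ListFilesSketch {
  // Mirrors the patched helper: keep only files whose name ends with ".sql",
  // recursing into subdirectories.
  private val validFileExtensions = ".sql"

  def listFilesRecursively(path: File): Seq[File] = {
    val (dirs, files) = path.listFiles().partition(_.isDirectory)
    val filteredFiles = files.filter(_.getName.endsWith(validFileExtensions))
    filteredFiles ++ dirs.flatMap(listFilesRecursively)
  }

  def main(args: Array[String]): Unit = {
    // Build a throwaway directory containing a valid test file plus the kind
    // of clutter the patch skips (.DS_Store, vi swap files).
    val root = Files.createTempDirectory("sql-inputs").toFile
    Seq("group-by.sql", ".DS_Store", ".group-by.sql.swp").foreach { name =>
      new File(root, name).createNewFile()
    }
    val nested = new File(root, "subquery")
    nested.mkdir()
    new File(nested, "exists-subquery.sql").createNewFile()

    // Prints only group-by.sql and exists-subquery.sql; the .DS_Store and
    // .swp entries are filtered out rather than blacklisted by name.
    listFilesRecursively(root).foreach(f => println(f.getName))
  }
}
```

With the extension filter in place, the blacklist no longer needs to enumerate individual junk files such as `.DS_Store`, which is why that entry is removed from `blackList` above.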