@@ -221,7 +221,16 @@ abstract class PartitioningAwareFileIndex(
        if (!fs.isDirectory(userDefinedBasePath)) {
          throw new IllegalArgumentException(s"Option '$BASE_PATH_PARAM' must be a directory")
        }
        Set(fs.makeQualified(userDefinedBasePath))
        def qualifiedPath(path: Path): String = fs.makeQualified(path).toString

        val qualifiedBasePath = qualifiedPath(userDefinedBasePath)
Contributor: let's call toString here, to avoid calling toString later many times

Contributor: We can even call toString in qualifiedPath and remove the need to call .toString altogether.
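Taken together, the two comments are about where the Path-to-String conversion happens; a minimal sketch of the contrast (the commented-out variant is hypothetical, shown only for illustration):

// Hypothetical Path-returning helper: callers must repeat .toString for every root path.
//   def qualifiedPath(path: Path): Path = fs.makeQualified(path)
//   rootPaths.find(p => !qualifiedPath(p).toString.startsWith(qualifiedBasePath))
// String-returning helper, as in the diff above: the conversion happens exactly once per path.
def qualifiedPath(path: Path): String = fs.makeQualified(path).toString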

        rootPaths
          .find(p => !qualifiedPath(p).startsWith(qualifiedBasePath))
          .foreach { rp =>
            throw new IllegalArgumentException(
              s"Wrong basePath $userDefinedBasePath for the root path: $rp")
          }
        Set(new Path(qualifiedBasePath))
Contributor: we should reduce overhead as much as we can:

val qualifiedBasePath = fs.makeQualified(userDefinedBasePath)
val qualifiedBasePathStr = qualifiedBasePath.toString
rootPaths.find...
Set(qualifiedBasePath)

Member Author: Ok, I see.
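Combining the reviewer's fragment above with the prefix check from the diff, the suggested shape would be roughly as follows (a sketch only; the actual follow-up commit may differ):

val qualifiedBasePath = fs.makeQualified(userDefinedBasePath)
val qualifiedBasePathStr = qualifiedBasePath.toString
rootPaths
  .find(p => !fs.makeQualified(p).toString.startsWith(qualifiedBasePathStr))
  .foreach { rp =>
    throw new IllegalArgumentException(
      s"Wrong basePath $userDefinedBasePath for the root path: $rp")
  }
// Reuse the qualified Path directly instead of re-parsing it from a String.
Set(qualifiedBasePath)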


      case None =>
        rootPaths.map { path =>
@@ -352,6 +352,26 @@ class FileIndexSuite extends SharedSparkSession {
"driver side must not be negative"))
}

test ("SPARK-29537: throw exception when user defined a wrong base path") {
Contributor: let's also add an end-to-end test with DataFrameReader

Member Author: Added 261b9ad

    withTempDir { dir =>
      val partitionDirectory = new File(dir, "a=foo")
      partitionDirectory.mkdir()
      val file = new File(partitionDirectory, "text.txt")
      stringToFile(file, "text")
      val path = new Path(dir.getCanonicalPath)
      val wrongBasePath = new File(dir, "unknown")
      // basePath must be a directory
      wrongBasePath.mkdir()
      val parameters = Map("basePath" -> wrongBasePath.getCanonicalPath)
      val fileIndex = new InMemoryFileIndex(spark, Seq(path), parameters, None)
      val msg = intercept[IllegalArgumentException] {
        // trigger inferPartitioning()
        fileIndex.partitionSpec()
      }.getMessage
      assert(msg === s"Wrong basePath ${wrongBasePath.getCanonicalPath} for the root path: $path")
    }
  }

test("refresh for InMemoryFileIndex with FileStatusCache") {
withTempDir { dir =>
val fileStatusCache = FileStatusCache.getOrCreate(spark)
@@ -234,6 +234,21 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSparkSession with
    assert(DataSourceUtils.decodePartitioningColumns(partColumns) === Seq("col1", "col2"))
  }

test ("SPARK-29537: throw exception when user defined a wrong base path") {
withTempPath { p =>
val path = new Path(p.toURI).toString
Seq((1, 1), (2, 2)).toDF("c1", "c2")
.write.partitionBy("c1").mode(SaveMode.Overwrite).parquet(path)
val wrongBasePath = new File(p, "unknown")
// basePath must be a directory
wrongBasePath.mkdir()
val msg = intercept[IllegalArgumentException] {
spark.read.option("basePath", wrongBasePath.getCanonicalPath).parquet(path)
}.getMessage
assert(msg === s"Wrong basePath ${wrongBasePath.getCanonicalPath} for the root path: $path")
}
}
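For contrast with the failing case in this test, a minimal sketch (not part of the PR) of the intended use of basePath: point it at an ancestor of the directories being read so the partition column is still discovered.

// Reusing `path` from the test above: read a single partition directory but keep
// `c1` as a column by anchoring partition discovery at the base directory.
val df = spark.read
  .option("basePath", path)
  .parquet(s"$path/c1=1")
// df has columns c1 and c2; reading s"$path/c1=1" without basePath would drop c1.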

test("save mode") {
spark.range(10).write
.format("org.apache.spark.sql.test")