[SPARK-17153][SQL] Should read partition data when reading new files in filestream without globbing #14803
Changes from all commits: 2771d71, 0d841e2, 6adf2e2, f7c9067, 04b61c7, 23ba9a2, 5b101ab, 541dfdc, 9d16631, e21536e
```diff
@@ -102,6 +102,12 @@ class FileStreamSourceTest extends StreamTest with SharedSQLContext with Private
     }
   }
 
+  case class DeleteFile(file: File) extends ExternalAction {
+    def runAction(): Unit = {
+      Utils.deleteRecursively(file)
+    }
+  }
+
   /** Use `format` and `path` to create FileStreamSource via DataFrameReader */
   def createFileStream(
       format: String,
```
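The `DeleteFile` action added above wraps a recursive delete in the suite's `ExternalAction` hook, so a test can mutate the source directory between processed batches. For orientation, the hook is roughly the following shape; this is a minimal sketch of an assumed definition (the real trait lives in `StreamTest`), not the verbatim code:

```scala
// Sketch of the hook DeleteFile plugs into (assumed shape): a test action
// whose side effect runs outside the streaming engine, between batches.
sealed trait StreamAction
trait ExternalAction extends StreamAction {
  def runAction(): Unit
}
```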
```diff
@@ -608,6 +614,81 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
 
   // =============== other tests ================
 
+  test("read new files in partitioned table without globbing, should read partition data") {
```
**Contributor:** We should probably also have an explicit test for the case where schema inference is turned on (you implicitly test it somewhat with the code changed below).

**Author:** Added a test for it.
```diff
+    withTempDirs { case (dir, tmp) =>
+      val partitionFooSubDir = new File(dir, "partition=foo")
+      val partitionBarSubDir = new File(dir, "partition=bar")
+
+      val schema = new StructType().add("value", StringType).add("partition", StringType)
+      val fileStream = createFileStream("json", s"${dir.getCanonicalPath}", Some(schema))
+      val filtered = fileStream.filter($"value" contains "keep")
+      testStream(filtered)(
+        // Create new partition=foo sub dir and write to it
+        AddTextFileData("{'value': 'drop1'}\n{'value': 'keep2'}", partitionFooSubDir, tmp),
+        CheckAnswer(("keep2", "foo")),
+
+        // Append to same partition=foo sub dir
+        AddTextFileData("{'value': 'keep3'}", partitionFooSubDir, tmp),
+        CheckAnswer(("keep2", "foo"), ("keep3", "foo")),
+
+        // Create new partition sub dir and write to it
+        AddTextFileData("{'value': 'keep4'}", partitionBarSubDir, tmp),
+        CheckAnswer(("keep2", "foo"), ("keep3", "foo"), ("keep4", "bar")),
+
+        // Append to same partition=bar sub dir
+        AddTextFileData("{'value': 'keep5'}", partitionBarSubDir, tmp),
+        CheckAnswer(("keep2", "foo"), ("keep3", "foo"), ("keep4", "bar"), ("keep5", "bar"))
+      )
+    }
+  }
+
+  test("when schema inference is turned on, should read partition data") {
+    def createFile(content: String, src: File, tmp: File): Unit = {
+      val tempFile = Utils.tempFileWith(new File(tmp, "text"))
+      val finalFile = new File(src, tempFile.getName)
+      src.mkdirs()
+      require(stringToFile(tempFile, content).renameTo(finalFile))
+    }
+
+    withSQLConf(SQLConf.STREAMING_SCHEMA_INFERENCE.key -> "true") {
+      withTempDirs { case (dir, tmp) =>
+        val partitionFooSubDir = new File(dir, "partition=foo")
+        val partitionBarSubDir = new File(dir, "partition=bar")
+
+        // Create file in partition, so we can infer the schema.
+        createFile("{'value': 'drop0'}", partitionFooSubDir, tmp)
+
+        val fileStream = createFileStream("json", s"${dir.getCanonicalPath}")
+        val filtered = fileStream.filter($"value" contains "keep")
+        testStream(filtered)(
+          // Append to same partition=foo sub dir
+          AddTextFileData("{'value': 'drop1'}\n{'value': 'keep2'}", partitionFooSubDir, tmp),
+          CheckAnswer(("keep2", "foo")),
+
+          // Append to same partition=foo sub dir
+          AddTextFileData("{'value': 'keep3'}", partitionFooSubDir, tmp),
+          CheckAnswer(("keep2", "foo"), ("keep3", "foo")),
+
+          // Create new partition sub dir and write to it
+          AddTextFileData("{'value': 'keep4'}", partitionBarSubDir, tmp),
+          CheckAnswer(("keep2", "foo"), ("keep3", "foo"), ("keep4", "bar")),
+
+          // Append to same partition=bar sub dir
+          AddTextFileData("{'value': 'keep5'}", partitionBarSubDir, tmp),
+          CheckAnswer(("keep2", "foo"), ("keep3", "foo"), ("keep4", "bar"), ("keep5", "bar")),
+
+          // Delete the two partition dirs
+          DeleteFile(partitionFooSubDir),
```
**Member:** @viirya why do we need to delete the dirs in this test? It's flaky, since the source may still be listing files.

**Member:** Removed them in #15699.

**Author:** @zsxwing I remember it was used to simulate a partition being deleted and data being re-inserted. Thanks for fixing this!
```diff
+          DeleteFile(partitionBarSubDir),
+
+          AddTextFileData("{'value': 'keep6'}", partitionBarSubDir, tmp),
+          CheckAnswer(("keep2", "foo"), ("keep3", "foo"), ("keep4", "bar"), ("keep5", "bar"),
+            ("keep6", "bar"))
+        )
+      }
+    }
+  }
+
   test("fault tolerance") {
     withTempDirs { case (src, tmp) =>
       val fileStream = createFileStream("text", src.getCanonicalPath)
```
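Both new tests lean on partition discovery: because files land under `partition=foo` and `partition=bar` subdirectories, every row picks up a `partition` column derived from the directory name. A batch-mode analogue makes the mechanism visible; this is a sketch with an assumed directory layout and an active `SparkSession` named `spark`, not code from the diff:

```scala
// Assumed layout:
//   /tmp/table/partition=foo/part-0.json  -> {"value": "keep2"}
//   /tmp/table/partition=bar/part-1.json  -> {"value": "keep4"}
import org.apache.spark.sql.types.{StringType, StructType}

val schema = new StructType()
  .add("value", StringType)
  .add("partition", StringType)  // filled in from the directory name

val df = spark.read.schema(schema).json("/tmp/table")
df.show()
// +-----+---------+
// |value|partition|
// +-----+---------+
// |keep2|      foo|
// |keep4|      bar|
// +-----+---------+
```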
```diff
@@ -792,7 +873,7 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
       }
       assert(src.listFiles().size === numFiles)
 
-      val files = spark.readStream.text(root.getCanonicalPath).as[String]
+      val files = spark.readStream.text(root.getCanonicalPath).as[(String, Int)]
 
       // Note this query will use constant folding to eliminate the file scan.
       // This is to avoid actually running a Spark job with 10000 tasks
```
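The one-line change above is a downstream effect of the fix: once the file stream source resolves partition directories, the streamed text rows carry the partition column as well, so the typed encoder changes from a plain `String` to a pair. The `Int` element suggests this test writes under a numeric partition directory (an assumption; the surrounding test body is not shown). A hedged sketch of the pattern, assuming a `spark` session with `spark.implicits._` in scope and an illustrative path:

```scala
import java.io.File

// Assumed layout: /tmp/root/partition=1/<files>, so each streamed row is
// (line, partition) rather than just the line.
val root = new File("/tmp/root")
val files = spark.readStream
  .text(root.getCanonicalPath)
  .as[(String, Int)]  // was .as[String] before partition data was surfaced
```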
**Reviewer:** Schema inference can lead to many corner cases around whether the inferred schema differs after a restart. So I think we should use stronger language saying that schema inference is not advisable for production use.
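In that spirit, the production-safe pattern is the one the first new test already uses: pass an explicit schema, partition column included, so nothing depends on what inference happens to produce after a restart. A minimal sketch, assuming an active `SparkSession` named `spark` and an illustrative input path:

```scala
import org.apache.spark.sql.types.{StringType, StructType}

// Explicit schema keeps the stream stable across restarts; no inference.
val schema = new StructType()
  .add("value", StringType)
  .add("partition", StringType)  // derived from partition=... directories

val stream = spark.readStream
  .schema(schema)        // same schema on every (re)start
  .json("/data/events")  // directory containing partition=.../ subdirs
```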