-
Notifications
You must be signed in to change notification settings - Fork 29k
[SPARK-24638][SQL] StringStartsWith support push down #21623
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
5b52ace
02f41cc
4f25a33
e959d1a
536610e
800fde7
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -55,7 +55,8 @@ import org.apache.spark.util.{AccumulatorContext, AccumulatorV2} | |
| */ | ||
| class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext { | ||
|
|
||
| private lazy val parquetFilters = new ParquetFilters(conf.parquetFilterPushDownDate) | ||
| private lazy val parquetFilters = | ||
| new ParquetFilters(conf.parquetFilterPushDownDate, conf.parquetFilterPushDownStringStartWith) | ||
|
|
||
| override def beforeEach(): Unit = { | ||
| super.beforeEach() | ||
|
|
@@ -82,6 +83,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex | |
| withSQLConf( | ||
| SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true", | ||
| SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true", | ||
| SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true", | ||
| SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") { | ||
| val query = df | ||
| .select(output.map(e => Column(e)): _*) | ||
|
|
@@ -140,6 +142,31 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex | |
| checkBinaryFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df) | ||
| } | ||
|
|
||
| // This function tests queries that exactly go through the `canDrop` and `inverseCanDrop` paths. | ||
| private def testStringStartsWith(dataFrame: DataFrame, filter: String): Unit = { | ||
| withTempPath { dir => | ||
| val path = dir.getCanonicalPath | ||
| dataFrame.write.option("parquet.block.size", 512).parquet(path) | ||
| Seq(true, false).foreach { pushDown => | ||
| withSQLConf( | ||
| SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> pushDown.toString) { | ||
| val accu = new NumRowGroupsAcc | ||
| sparkContext.register(accu) | ||
|
|
||
| val df = spark.read.parquet(path).filter(filter) | ||
| df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0))) | ||
| if (pushDown) { | ||
| assert(accu.value == 0) | ||
| } else { | ||
| assert(accu.value > 0) | ||
| } | ||
|
|
||
| AccumulatorContext.remove(accu.id) | ||
| } | ||
| } | ||
| } | ||
| } | ||
|
|
||
| test("filter pushdown - boolean") { | ||
| withParquetDataFrame((true :: false :: Nil).map(b => Tuple1.apply(Option(b)))) { implicit df => | ||
| checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row]) | ||
|
|
@@ -574,7 +601,6 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex | |
|
|
||
| val df = spark.read.parquet(path).filter("a < 100") | ||
| df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0))) | ||
| df.collect | ||
|
|
||
| if (enablePushDown) { | ||
| assert(accu.value == 0) | ||
|
|
@@ -660,6 +686,60 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex | |
| assert(df.where("col > 0").count() === 2) | ||
| } | ||
| } | ||
|
|
||
| test("filter pushdown - StringStartsWith") { | ||
| withParquetDataFrame((1 to 4).map(i => Tuple1(i + "str" + i))) { implicit df => | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think that all of these tests go through the
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Added |
||
| checkFilterPredicate( | ||
| '_1.startsWith("").asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| Seq("1str1", "2str2", "3str3", "4str4").map(Row(_))) | ||
|
|
||
| Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix => | ||
| checkFilterPredicate( | ||
| '_1.startsWith(prefix).asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| "2str2") | ||
| } | ||
|
|
||
| Seq("2S", "null", "2str22").foreach { prefix => | ||
| checkFilterPredicate( | ||
| '_1.startsWith(prefix).asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| Seq.empty[Row]) | ||
| } | ||
|
|
||
| checkFilterPredicate( | ||
| !'_1.startsWith("").asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| Seq().map(Row(_))) | ||
|
|
||
| Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix => | ||
| checkFilterPredicate( | ||
| !'_1.startsWith(prefix).asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| Seq("1str1", "3str3", "4str4").map(Row(_))) | ||
| } | ||
|
|
||
| Seq("2S", "null", "2str22").foreach { prefix => | ||
| checkFilterPredicate( | ||
| !'_1.startsWith(prefix).asInstanceOf[Predicate], | ||
| classOf[UserDefinedByInstance[_, _]], | ||
| Seq("1str1", "2str2", "3str3", "4str4").map(Row(_))) | ||
| } | ||
|
|
||
| assertResult(None) { | ||
| parquetFilters.createFilter( | ||
| df.schema, | ||
| sources.StringStartsWith("_1", null)) | ||
|
Member
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Thanks @attilapiros , |
||
| } | ||
| } | ||
|
|
||
| import testImplicits._ | ||
| // Test canDrop() has taken effect | ||
| testStringStartsWith(spark.range(1024).map(_.toString).toDF(), "value like 'a%'") | ||
| // Test inverseCanDrop() has taken effect | ||
| testStringStartsWith(spark.range(1024).map(c => "100").toDF(), "value not like '10%'") | ||
| } | ||
| } | ||
|
|
||
| class NumRowGroupsAcc extends AccumulatorV2[Integer, Integer] { | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
It would be better if we added the `.enabled` postfix.