Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -729,7 +729,7 @@ private[client] class Shim_v0_13 extends Shim_v0_12 {
def unapply(expr: Expression): Option[Attribute] = {
expr match {
case attr: Attribute => Some(attr)
case Cast(child @ AtomicType(), dt: AtomicType, _)
case Cast(child @ IntegralType(), dt: IntegralType, _)
if Cast.canUpCast(child.dataType.asInstanceOf[AtomicType], dt) => unapply(child)
case _ => None
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StructType}
import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StringType, StructType}
import org.apache.spark.util.Utils

class HivePartitionFilteringSuite(version: String)
Expand Down Expand Up @@ -290,6 +290,13 @@ class HivePartitionFilteringSuite(version: String)
(20170101 to 20170103, 0 to 4, Seq("ab", "bb")) :: Nil)
}

test("getPartitionsByFilter: chunk in ('ab', 'ba') and ((cast(ds as string)>'20170102')") {
val day = (20170101 to 20170103, 0 to 4, Seq("ab", "ba"))
testMetastorePartitionFiltering(
attr("chunk").in("ab", "ba") && (attr("ds").cast(StringType) > "20170102"),
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What happens for 20170102.1234?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It is the same because we don't prune it:

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
index 7e10d49..6b976d9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StructType}
+import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StringType, StructType}
 import org.apache.spark.util.Utils

 class HivePartitionFilteringSuite(version: String)
@@ -272,6 +272,15 @@ class HivePartitionFilteringSuite(version: String)
       day1 :: day2 :: Nil)
   }

+
+  test("getPartitionsByFilter: chunk in ('ab', 'ba') and " +
+    "((cast(ds as string)='20170101') or (cast(ds as string)='20170102'))") {
+    val day = (20170101 to 20170103, 0 to 4, Seq("ab", "ba"))
+    testMetastorePartitionFiltering(attr("chunk").in("ab", "ba") &&
+      (attr("ds").cast(StringType) > "20170102.1234"),
+      day :: Nil)
+  }
+

day :: Nil)
}

private def testMetastorePartitionFiltering(
filterExpr: Expression,
expectedDs: Seq[Int],
Expand Down