Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,15 @@ object BooleanSimplification extends Rule[LogicalPlan] with PredicateHelper {
case TrueLiteral Or _ => TrueLiteral
case _ Or TrueLiteral => TrueLiteral

-    case a And b if Not(a).semanticEquals(b) => FalseLiteral
-    case a Or b if Not(a).semanticEquals(b) => TrueLiteral
-    case a And b if a.semanticEquals(Not(b)) => FalseLiteral
-    case a Or b if a.semanticEquals(Not(b)) => TrueLiteral
+    case a And b if Not(a).semanticEquals(b) =>
+      If(IsNull(a), Literal.create(null, a.dataType), FalseLiteral)
+    case a And b if a.semanticEquals(Not(b)) =>
+      If(IsNull(b), Literal.create(null, b.dataType), FalseLiteral)
+
+    case a Or b if Not(a).semanticEquals(b) =>
+      If(IsNull(a), Literal.create(null, a.dataType), TrueLiteral)
+    case a Or b if a.semanticEquals(Not(b)) =>
+      If(IsNull(b), Literal.create(null, b.dataType), TrueLiteral)

case a And b if a.semanticEquals(b) => a
case a Or b if a.semanticEquals(b) => a
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

package org.apache.spark.sql.catalyst.optimizer

import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.catalog.{InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.dsl.expressions._
Expand All @@ -26,7 +27,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.types.BooleanType

class BooleanSimplificationSuite extends PlanTest with PredicateHelper {

Expand All @@ -37,14 +38,24 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
Batch("Constant Folding", FixedPoint(50),
NullPropagation(conf),
ConstantFolding,
SimplifyConditionals,
BooleanSimplification,
PruneFilters(conf)) :: Nil
}
Copy link
Member

@dongjoon-hyun dongjoon-hyun Sep 12, 2018

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

At line 46, SPARK-17851 changed like the following. We need that change. After updating that, it passes the test without any problems.

- val testRelation = LocalRelation('a.int, 'b.int, 'c.int, 'd.string)
+ val testRelation = LocalRelation('a.int, 'b.int, 'c.int, 'd.string,
   'e.boolean, 'f.boolean, 'g.boolean, 'h.boolean)

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see. Thanks!


-  val testRelation = LocalRelation('a.int, 'b.int, 'c.int, 'd.string)
+  val testRelation = LocalRelation('a.int, 'b.int, 'c.int, 'd.string,
+    'e.boolean, 'f.boolean, 'g.boolean, 'h.boolean)

val testRelationWithData = LocalRelation.fromExternalRows(
-    testRelation.output, Seq(Row(1, 2, 3, "abc"))
+    testRelation.output, Seq(Row(1, 2, 3, "abc", true, null, true, null))
)

val testNotNullableRelation = LocalRelation('a.int.notNull, 'b.int.notNull, 'c.int.notNull,
'd.string.notNull, 'e.boolean.notNull, 'f.boolean.notNull, 'g.boolean.notNull,
'h.boolean.notNull)

val testNotNullableRelationWithData = LocalRelation.fromExternalRows(
testNotNullableRelation.output, Seq(Row(1, 2, 3, "abc", true, false, true, false))
)

private def checkCondition(input: Expression, expected: LogicalPlan): Unit = {
Expand All @@ -60,6 +71,13 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
comparePlans(actual, correctAnswer)
}

private def checkConditionInNotNullableRelation(
input: Expression, expected: LogicalPlan): Unit = {
val plan = testNotNullableRelationWithData.where(input).analyze
val actual = Optimize.execute(plan)
comparePlans(actual, expected)
}

test("a && a => a") {
checkCondition(Literal(1) < 'a && Literal(1) < 'a, Literal(1) < 'a)
checkCondition(Literal(1) < 'a && Literal(1) < 'a && Literal(1) < 'a, Literal(1) < 'a)
Expand Down Expand Up @@ -173,10 +191,30 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
}

test("Complementation Laws") {
-    checkCondition('a && !'a, testRelation)
-    checkCondition(!'a && 'a, testRelation)
+    checkConditionInNotNullableRelation('e && !'e, testNotNullableRelation)
+    checkConditionInNotNullableRelation(!'e && 'e, testNotNullableRelation)
+
+    checkConditionInNotNullableRelation('e || !'e, testNotNullableRelationWithData)
+    checkConditionInNotNullableRelation(!'e || 'e, testNotNullableRelationWithData)
}

test("Complementation Laws - null handling") {
checkCondition('e && !'e,
testRelationWithData.where(If('e.isNull, Literal.create(null, BooleanType), false)).analyze)
checkCondition(!'e && 'e,
testRelationWithData.where(If('e.isNull, Literal.create(null, BooleanType), false)).analyze)

checkCondition('e || !'e,
testRelationWithData.where(If('e.isNull, Literal.create(null, BooleanType), true)).analyze)
checkCondition(!'e || 'e,
testRelationWithData.where(If('e.isNull, Literal.create(null, BooleanType), true)).analyze)
}

test("Complementation Laws - negative case") {
checkCondition('e && !'f, testRelationWithData.where('e && !'f).analyze)
checkCondition(!'f && 'e, testRelationWithData.where(!'f && 'e).analyze)

-    checkCondition('a || !'a, testRelationWithData)
-    checkCondition(!'a || 'a, testRelationWithData)
+    checkCondition('e || !'f, testRelationWithData.where('e || !'f).analyze)
+    checkCondition(!'f || 'e, testRelationWithData.where(!'f || 'e).analyze)
}
}
10 changes: 10 additions & 0 deletions sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1789,4 +1789,14 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
test("SPARK-22469: compare string with decimal") {
checkAnswer(Seq("1.5").toDF("s").filter("s > 0.5"), Row("1.5"))
}

test("SPARK-25402 Null handling in BooleanSimplification") {
val schema = StructType.fromDDL("a boolean, b int")
val rows = Seq(Row(null, 1))

val rdd = sparkContext.parallelize(rows)
val df = spark.createDataFrame(rdd, schema)

checkAnswer(df.where("(NOT a) OR a"), Seq.empty)
}
}