From d5a5575fd81c70f62fe54a807bf80c6d81f41d5f Mon Sep 17 00:00:00 2001 From: ulysses Date: Sat, 18 Jul 2020 18:35:32 +0800 Subject: [PATCH 01/11] init --- .../catalyst/analysis/ResolveSessionCatalog.scala | 7 ++++++- .../sql/execution/command/PlanResolutionSuite.scala | 13 ++++++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala index bc3f38a35834d..3701cf42914f5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala @@ -529,6 +529,9 @@ class ResolveSessionCatalog( partitionSpec) case AlterViewAsStatement(name, originalText, query) => + if (query.resolved) { + assertNoNullTypeInSchema(query.schema) + } val viewName = parseTempViewOrV1Table(name, "ALTER VIEW QUERY") AlterViewAsCommand( viewName.asTableIdentifier, @@ -538,7 +541,9 @@ class ResolveSessionCatalog( case CreateViewStatement( tbl, userSpecifiedColumns, comment, properties, originalText, child, allowExisting, replace, viewType) => - + if (child.resolved) { + assertNoNullTypeInSchema(child.schema) + } val v1TableName = if (viewType != PersistedView) { // temp view doesn't belong to any catalog and we shouldn't resolve catalog in the name. tbl diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 2d6a5da6d67f7..cd5996cc93473 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -38,7 +38,7 @@ import org.apache.spark.sql.execution.datasources.CreateTable import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources.SimpleScanSource -import org.apache.spark.sql.types.{CharType, DoubleType, HIVE_TYPE_STRING, IntegerType, LongType, MetadataBuilder, StringType, StructField, StructType} +import org.apache.spark.sql.types.{CharType, DoubleType, HIVE_TYPE_STRING, IntegerType, LongType, MetadataBuilder, NullType, StringType, StructField, StructType} class PlanResolutionSuite extends AnalysisTest { import CatalystSqlParser._ @@ -1557,6 +1557,17 @@ class PlanResolutionSuite extends AnalysisTest { checkFailure("testcat.tab", "foo") } + test("SPARK-32356: forbid null type in create view") { + val sql1 = "create view v as select null as c" + val sql2 = "alter view v as select null as c" + Seq(sql1, sql2).foreach { sql => + val msg = intercept[AnalysisException] { + parseAndResolve(sql) + }.getMessage + assert(msg.contains(s"Cannot create tables with ${NullType.simpleString} type.")) + } + } + // TODO: add tests for more commands. 
} From 95dd3d257fb14189fbbbf158ae87823ad7d5dd7b Mon Sep 17 00:00:00 2001 From: ulysses Date: Mon, 20 Jul 2020 17:17:01 +0800 Subject: [PATCH 02/11] update error msg --- .../org/apache/spark/sql/connector/catalog/CatalogV2Util.scala | 2 +- .../spark/sql/execution/command/PlanResolutionSuite.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala index 1a3a7207c6ca9..a6d67f4683c15 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala @@ -356,7 +356,7 @@ private[sql] object CatalogV2Util { } if (containsNullType(dt)) { throw new AnalysisException( - s"Cannot create tables with ${NullType.simpleString} type.") + s"Cannot create tables/views with ${NullType.simpleString} type.") } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index cd5996cc93473..0c52ded502064 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -1564,7 +1564,7 @@ class PlanResolutionSuite extends AnalysisTest { val msg = intercept[AnalysisException] { parseAndResolve(sql) }.getMessage - assert(msg.contains(s"Cannot create tables with ${NullType.simpleString} type.")) + assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) } } From 3780ccd133259314f1295054ed7f085e702b3212 Mon Sep 17 00:00:00 2001 From: ulysses Date: Mon, 20 Jul 2020 17:24:34 +0800 Subject: [PATCH 03/11] add temp view test --- .../spark/sql/execution/command/PlanResolutionSuite.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 0c52ded502064..7820c14150fc1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -1560,7 +1560,9 @@ class PlanResolutionSuite extends AnalysisTest { test("SPARK-32356: forbid null type in create view") { val sql1 = "create view v as select null as c" val sql2 = "alter view v as select null as c" - Seq(sql1, sql2).foreach { sql => + val sql3 = "create temporary view v as select null as c" + val sql4 = "create global temporary view v as select null as c" + Seq(sql1, sql2, sql3, sql4).foreach { sql => val msg = intercept[AnalysisException] { parseAndResolve(sql) }.getMessage From 2e8d9f3c6fe51cb6e2c609beaafbecea7455294e Mon Sep 17 00:00:00 2001 From: ulysses Date: Mon, 20 Jul 2020 17:37:06 +0800 Subject: [PATCH 04/11] add dataset ut --- .../scala/org/apache/spark/sql/DataFrameSuite.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 52ef5895ed9ad..9019ddc2163f0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -2540,6 +2540,17 @@ class DataFrameSuite extends QueryTest assert(df.schema === new StructType().add(StructField("d", DecimalType(38, 0)))) } } + + test("SPARK-32356: forbid null type in create view") { + val msg = intercept[AnalysisException] { + sql("select null as c").createTempView("null_type_view") + }.getMessage + assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) + val msg2 = intercept[AnalysisException] { + sql("select null as c").createGlobalTempView("null_type_view") + }.getMessage + assert(msg2.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) + } } case class GroupByKey(a: Int, b: Int) From 9d77cb50049b774a3f06ceb339898787d05a5625 Mon Sep 17 00:00:00 2001 From: ulysses Date: Mon, 20 Jul 2020 20:26:37 +0800 Subject: [PATCH 05/11] move to SQLViewSuite --- .../scala/org/apache/spark/sql/DataFrameSuite.scala | 11 ----------- .../apache/spark/sql/execution/SQLViewSuite.scala | 12 ++++++++++++ 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 9019ddc2163f0..52ef5895ed9ad 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -2540,17 +2540,6 @@ class DataFrameSuite extends QueryTest assert(df.schema === new StructType().add(StructField("d", DecimalType(38, 0)))) } } - - test("SPARK-32356: forbid null type in create view") { - val msg = intercept[AnalysisException] { - sql("select null as c").createTempView("null_type_view") - }.getMessage - assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) - val msg2 = intercept[AnalysisException] { - sql("select null as c").createGlobalTempView("null_type_view") - }.getMessage - assert(msg2.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) - } } case class GroupByKey(a: Int, b: Int) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index 575efec364812..bb033dbf92b89 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -22,6 +22,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.analysis.NoSuchTableException import org.apache.spark.sql.internal.SQLConf.MAX_NESTED_VIEW_DEPTH import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} +import org.apache.spark.sql.types.NullType class SimpleSQLViewSuite extends SQLViewSuite with SharedSparkSession @@ -735,4 +736,15 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } } } + + test("SPARK-32356: forbid null type in create view") { + val msg = intercept[AnalysisException] { + sql("select null as c").createTempView("null_type_view") + }.getMessage + assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) + val msg2 = intercept[AnalysisException] { + sql("select null as c").createGlobalTempView("null_type_view") + }.getMessage + assert(msg2.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) + } } From e1d92aea82a3c58ed8974cc466302c36285dd6c2 Mon Sep 17 00:00:00 2001 From: ulysses Date: Mon, 20 Jul 2020 20:32:08 
+0800 Subject: [PATCH 06/11] update ut --- .../sql/hive/execution/HiveDDLSuite.scala | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala index fbd1fc1ea98df..c9e4145c7ccec 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala @@ -2314,6 +2314,7 @@ class HiveDDLSuite } test("SPARK-20680: do not support for null column datatype") { + val errMsg = s"Cannot create tables/views with ${NullType.simpleString} type." withTable("t") { withView("tabNullType") { hiveClient.runSqlHive("CREATE TABLE t (t1 int)") @@ -2331,17 +2332,17 @@ class HiveDDLSuite val e1 = intercept[AnalysisException] { spark.sql("CREATE TABLE t1 USING PARQUET AS SELECT null as null_col") }.getMessage - assert(e1.contains("Cannot create tables with null type")) + assert(e1.contains(errMsg)) val e2 = intercept[AnalysisException] { spark.sql("CREATE TABLE t2 AS SELECT null as null_col") }.getMessage - assert(e2.contains("Cannot create tables with null type")) + assert(e2.contains(errMsg)) val e3 = intercept[AnalysisException] { spark.sql("CREATE TABLE t3 STORED AS PARQUET AS SELECT null as null_col") }.getMessage - assert(e3.contains("Cannot create tables with null type")) + assert(e3.contains(errMsg)) } // Forbid Replace table AS SELECT with null type @@ -2350,7 +2351,7 @@ class HiveDDLSuite val e = intercept[AnalysisException] { spark.sql(s"CREATE OR REPLACE TABLE t USING $v2Source AS SELECT null as null_col") }.getMessage - assert(e.contains("Cannot create tables with null type")) + assert(e.contains(errMsg)) } // Forbid creating table with VOID type in Spark @@ -2358,19 +2359,19 @@ class HiveDDLSuite val e1 = intercept[AnalysisException] { spark.sql(s"CREATE TABLE t1 (v VOID) USING PARQUET") }.getMessage - assert(e1.contains("Cannot create tables with null type")) + assert(e1.contains(errMsg)) val e2 = intercept[AnalysisException] { spark.sql(s"CREATE TABLE t2 (v VOID) USING hive") }.getMessage - assert(e2.contains("Cannot create tables with null type")) + assert(e2.contains(errMsg)) val e3 = intercept[AnalysisException] { spark.sql(s"CREATE TABLE t3 (v VOID)") }.getMessage - assert(e3.contains("Cannot create tables with null type")) + assert(e3.contains(errMsg)) val e4 = intercept[AnalysisException] { spark.sql(s"CREATE TABLE t4 (v VOID) STORED AS PARQUET") }.getMessage - assert(e4.contains("Cannot create tables with null type")) + assert(e4.contains(errMsg)) } // Forbid Replace table with VOID type @@ -2379,7 +2380,7 @@ class HiveDDLSuite val e = intercept[AnalysisException] { spark.sql(s"CREATE OR REPLACE TABLE t (v VOID) USING $v2Source") }.getMessage - assert(e.contains("Cannot create tables with null type")) + assert(e.contains(errMsg)) } // Make sure spark.catalog.createTable with null type will fail @@ -2416,7 +2417,7 @@ class HiveDDLSuite schema = schema, options = Map("fileFormat" -> "parquet")) }.getMessage - assert(e.contains("Cannot create tables with null type")) + assert(e.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) } } @@ -2429,7 +2430,7 @@ class HiveDDLSuite schema = schema, options = Map.empty[String, String]) }.getMessage - assert(e.contains("Cannot create tables with null type")) + assert(e.contains(s"Cannot create tables/views with ${NullType.simpleString} 
type.")) } } From 32ec11ab861160801bfb85ef639b728545888d21 Mon Sep 17 00:00:00 2001 From: ulysses Date: Tue, 21 Jul 2020 08:10:20 +0800 Subject: [PATCH 07/11] add view command check --- .../scala/org/apache/spark/sql/execution/command/views.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala index 23f1d6c983413..6586a22b90396 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala @@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeRef import org.apache.spark.sql.catalyst.plans.QueryPlan import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, View} import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.NamespaceHelper +import org.apache.spark.sql.connector.catalog.CatalogV2Util.assertNoNullTypeInSchema import org.apache.spark.sql.internal.StaticSQLConf import org.apache.spark.sql.types.{BooleanType, MetadataBuilder, StringType} import org.apache.spark.sql.util.SchemaUtils @@ -96,6 +97,7 @@ case class CreateViewCommand( qe.assertAnalyzed() val analyzedPlan = qe.analyzed + assertNoNullTypeInSchema(analyzedPlan.schema) if (userSpecifiedColumns.nonEmpty && userSpecifiedColumns.length != analyzedPlan.output.length) { throw new AnalysisException(s"The number of columns produced by the SELECT clause " + From 94fb54674cf09ffdf76a5f16554c2de7d07dacd2 Mon Sep 17 00:00:00 2001 From: ulysses Date: Tue, 21 Jul 2020 08:13:49 +0800 Subject: [PATCH 08/11] mv to SQLViewSuite --- .../apache/spark/sql/execution/SQLViewSuite.scala | 15 +++++++++++++++ .../execution/command/PlanResolutionSuite.scala | 15 +-------------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index bb033dbf92b89..8c28b4a783d65 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -738,6 +738,21 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } test("SPARK-32356: forbid null type in create view") { + // test sql + val sql1 = "create view v as select null as c" + val sql2 = "alter view v as select null as c" + val sql3 = "create temporary view v as select null as c" + val sql4 = "create global temporary view v as select null as c" + Seq(sql1, sql2, sql3, sql4).foreach { input => + val msg = intercept[AnalysisException] { + sql(input) + }.getMessage + assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) + } + } + + test("SPARK-32356: forbid null type in create view") { + // test df.createTempView val msg = intercept[AnalysisException] { sql("select null as c").createTempView("null_type_view") }.getMessage diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 7820c14150fc1..2d6a5da6d67f7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -38,7 +38,7 @@ import 
org.apache.spark.sql.execution.datasources.CreateTable import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources.SimpleScanSource -import org.apache.spark.sql.types.{CharType, DoubleType, HIVE_TYPE_STRING, IntegerType, LongType, MetadataBuilder, NullType, StringType, StructField, StructType} +import org.apache.spark.sql.types.{CharType, DoubleType, HIVE_TYPE_STRING, IntegerType, LongType, MetadataBuilder, StringType, StructField, StructType} class PlanResolutionSuite extends AnalysisTest { import CatalystSqlParser._ @@ -1557,19 +1557,6 @@ class PlanResolutionSuite extends AnalysisTest { checkFailure("testcat.tab", "foo") } - test("SPARK-32356: forbid null type in create view") { - val sql1 = "create view v as select null as c" - val sql2 = "alter view v as select null as c" - val sql3 = "create temporary view v as select null as c" - val sql4 = "create global temporary view v as select null as c" - Seq(sql1, sql2, sql3, sql4).foreach { sql => - val msg = intercept[AnalysisException] { - parseAndResolve(sql) - }.getMessage - assert(msg.contains(s"Cannot create tables/views with ${NullType.simpleString} type.")) - } - } - // TODO: add tests for more commands. } From 4d8d0888277fa1cf02317d2f3cc0e037eabbdfea Mon Sep 17 00:00:00 2001 From: ulysses Date: Tue, 21 Jul 2020 10:22:33 +0800 Subject: [PATCH 09/11] fix ut name --- .../scala/org/apache/spark/sql/execution/SQLViewSuite.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala index 8c28b4a783d65..b43e51fb6b25c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala @@ -737,7 +737,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } } - test("SPARK-32356: forbid null type in create view") { + test("SPARK-32356: Forbid create view with null type in sql side") { // test sql val sql1 = "create view v as select null as c" val sql2 = "alter view v as select null as c" @@ -751,7 +751,7 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { } } - test("SPARK-32356: forbid null type in create view") { + test("SPARK-32356: Forbid create view with null type in dataset side") { // test df.createTempView val msg = intercept[AnalysisException] { sql("select null as c").createTempView("null_type_view") From 3cb224f1594a34db9254d0e0697c1be0aee68263 Mon Sep 17 00:00:00 2001 From: ulysses Date: Fri, 24 Jul 2020 10:05:21 +0800 Subject: [PATCH 10/11] remove null --- .../inputs/postgreSQL/aggregates_part2.sql | 9 +++------ .../results/postgreSQL/aggregates_part2.sql.out | 15 ++++++--------- .../apache/spark/sql/execution/PlannerSuite.scala | 1 - .../columnar/InMemoryColumnarQuerySuite.scala | 8 ++------ 4 files changed, 11 insertions(+), 22 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/aggregates_part2.sql b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/aggregates_part2.sql index a8af1db77563c..c79c31921ce14 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/aggregates_part2.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/aggregates_part2.sql @@ -115,9 +115,9 @@ SELECT -- [SPARK-27880] Implement boolean aggregates(BOOL_AND, BOOL_OR and EVERY) CREATE OR REPLACE TEMPORARY VIEW 
bool_test AS SELECT * FROM VALUES - (TRUE, null, FALSE, null), - (FALSE, TRUE, null, null), - (null, TRUE, FALSE, null) AS bool_test(b1, b2, b3, b4); + (TRUE, null, FALSE), + (FALSE, TRUE, null), + (null, TRUE, FALSE) AS bool_test(b1, b2, b3); -- empty case SELECT BOOL_AND(b1) AS n1, BOOL_OR(b3) AS n2 FROM bool_test WHERE 1 = 0; @@ -126,7 +126,6 @@ SELECT BOOL_AND(b1) AS f1, BOOL_AND(b2) AS t2, BOOL_AND(b3) AS f3, - BOOL_AND(b4) AS n4, BOOL_AND(NOT b2) AS f5, BOOL_AND(NOT b3) AS t6 FROM bool_test; @@ -135,7 +134,6 @@ SELECT EVERY(b1) AS f1, EVERY(b2) AS t2, EVERY(b3) AS f3, - EVERY(b4) AS n4, EVERY(NOT b2) AS f5, EVERY(NOT b3) AS t6 FROM bool_test; @@ -144,7 +142,6 @@ SELECT BOOL_OR(b1) AS t1, BOOL_OR(b2) AS t2, BOOL_OR(b3) AS f3, - BOOL_OR(b4) AS n4, BOOL_OR(NOT b2) AS f5, BOOL_OR(NOT b3) AS t6 FROM bool_test; diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part2.sql.out index 6633bf5d114ed..ff401f111a879 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part2.sql.out @@ -135,9 +135,9 @@ true false true false true true true true true -- !query CREATE OR REPLACE TEMPORARY VIEW bool_test AS SELECT * FROM VALUES - (TRUE, null, FALSE, null), - (FALSE, TRUE, null, null), - (null, TRUE, FALSE, null) AS bool_test(b1, b2, b3, b4) + (TRUE, null, FALSE), + (FALSE, TRUE, null), + (null, TRUE, FALSE) AS bool_test(b1, b2, b3) -- !query schema struct<> -- !query output @@ -157,14 +157,13 @@ SELECT BOOL_AND(b1) AS f1, BOOL_AND(b2) AS t2, BOOL_AND(b3) AS f3, - BOOL_AND(b4) AS n4, BOOL_AND(NOT b2) AS f5, BOOL_AND(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -false true false NULL false true +false true false false true -- !query @@ -172,14 +171,13 @@ SELECT EVERY(b1) AS f1, EVERY(b2) AS t2, EVERY(b3) AS f3, - EVERY(b4) AS n4, EVERY(NOT b2) AS f5, EVERY(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -false true false NULL false true +false true false false true -- !query @@ -187,14 +185,13 @@ SELECT BOOL_OR(b1) AS t1, BOOL_OR(b2) AS t2, BOOL_OR(b3) AS f3, - BOOL_OR(b4) AS n4, BOOL_OR(NOT b2) AS f5, BOOL_OR(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -true true false NULL false true +true true false false true -- !query diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala index d428b7ebc0e91..8790d05b6c7b7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala @@ -118,7 +118,6 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper { } val simpleTypes = - NullType :: BooleanType :: ByteType :: ShortType :: diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala index 18f29f7b90ad5..b2808b33962fa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala @@ -106,15 +106,11 @@ class InMemoryColumnarQuerySuite extends QueryTest with SharedSparkSession { }
test("non-primitive type with nullability:true") { - val schemaNull = StructType(Seq(StructField("col", NullType, true))) - val rddNull = spark.sparkContext.parallelize((1 to 10).map(i => Row(null))) - cachePrimitiveTest(spark.createDataFrame(rddNull, schemaNull), "Null") - tesNonPrimitiveType(true) } test("non-primitive type with nullability:false") { - tesNonPrimitiveType(false) + tesNonPrimitiveType(false) } test("simple columnar query") { @@ -248,7 +244,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with SharedSparkSession { StructField("f1", FloatType, true) :: StructField("f2", ArrayType(BooleanType), true) :: Nil) val dataTypes = - Seq(StringType, BinaryType, NullType, BooleanType, + Seq(StringType, BinaryType, BooleanType, ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5), DateType, TimestampType, ArrayType(IntegerType), struct) From d02986cffe43150d6184915eeec1741c51eace83 Mon Sep 17 00:00:00 2001 From: ulysses Date: Fri, 24 Jul 2020 12:34:17 +0800 Subject: [PATCH 11/11] remove null --- .../udf/postgreSQL/udf-aggregates_part2.sql | 6 +++--- .../udf/postgreSQL/udf-aggregates_part2.sql.out | 15 ++++++--------- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/inputs/udf/postgreSQL/udf-aggregates_part2.sql b/sql/core/src/test/resources/sql-tests/inputs/udf/postgreSQL/udf-aggregates_part2.sql index b4054850062b7..936234fdddb02 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/udf/postgreSQL/udf-aggregates_part2.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/udf/postgreSQL/udf-aggregates_part2.sql @@ -103,9 +103,9 @@ SELECT -- [SPARK-27880] Implement boolean aggregates(BOOL_AND, BOOL_OR and EVERY) CREATE OR REPLACE TEMPORARY VIEW bool_test AS SELECT * FROM VALUES - (TRUE, null, FALSE, null), - (FALSE, TRUE, null, null), - (null, TRUE, FALSE, null) AS bool_test(b1, b2, b3, b4); + (TRUE, null, FALSE), + (FALSE, TRUE, null), + (null, TRUE, FALSE) AS bool_test(b1, b2, b3); -- empty case SELECT BOOL_AND(b1) AS n1, BOOL_OR(b3) AS n2 FROM bool_test WHERE 1 = 0; diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part2.sql.out index d4941d0a0b768..ebf95b03fe892 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part2.sql.out @@ -96,9 +96,9 @@ true false true false true true true true true -- !query CREATE OR REPLACE TEMPORARY VIEW bool_test AS SELECT * FROM VALUES - (TRUE, null, FALSE, null), - (FALSE, TRUE, null, null), - (null, TRUE, FALSE, null) AS bool_test(b1, b2, b3, b4) + (TRUE, null, FALSE), + (FALSE, TRUE, null), + (null, TRUE, FALSE) AS bool_test(b1, b2, b3) -- !query schema struct<> -- !query output @@ -118,14 +118,13 @@ SELECT BOOL_AND(b1) AS f1, BOOL_AND(b2) AS t2, BOOL_AND(b3) AS f3, - BOOL_AND(b4) AS n4, BOOL_AND(NOT b2) AS f5, BOOL_AND(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -false true false NULL false true +false true false false true -- !query @@ -133,14 +132,13 @@ SELECT EVERY(b1) AS f1, EVERY(b2) AS t2, EVERY(b3) AS f3, - EVERY(b4) AS n4, EVERY(NOT b2) AS f5, EVERY(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -false true false NULL false true +false true false false true -- !query @@ -148,14 +146,13 @@ SELECT BOOL_OR(b1) AS t1, 
BOOL_OR(b2) AS t2, BOOL_OR(b3) AS f3, - BOOL_OR(b4) AS n4, BOOL_OR(NOT b2) AS f5, BOOL_OR(NOT b3) AS t6 FROM bool_test -- !query schema struct -- !query output -true true false NULL false true +true true false false true -- !query
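Illustrative usage after this series (a minimal sketch, not taken from the patches above; it assumes a Spark build with these commits applied and an active SparkSession bound to `spark`): every path that creates or redefines a view — CREATE VIEW ... AS, ALTER VIEW ... AS, CREATE [GLOBAL] TEMPORARY VIEW ... AS, and Dataset.createTempView / createGlobalTempView — rejects a query whose schema contains NullType with the message "Cannot create tables/views with null type."

    import org.apache.spark.sql.AnalysisException

    // SQL path: CreateViewStatement / AlterViewAsStatement are checked in ResolveSessionCatalog.
    try {
      spark.sql("CREATE VIEW v AS SELECT null AS c")
    } catch {
      case e: AnalysisException =>
        assert(e.getMessage.contains("Cannot create tables/views with null type"))
    }

    // Dataset path: CreateViewCommand runs the same assertNoNullTypeInSchema check.
    try {
      spark.sql("SELECT null AS c").createTempView("null_type_view")
    } catch {
      case e: AnalysisException =>
        assert(e.getMessage.contains("Cannot create tables/views with null type"))
    }

    // Casting the literal to a concrete type removes NullType from the schema,
    // so the view is created as before.
    spark.sql("CREATE OR REPLACE TEMPORARY VIEW v2 AS SELECT CAST(null AS STRING) AS c")

If the NULL literal is cast to a concrete type, the schema no longer contains NullType and the statement succeeds, which is the same workaround the test data above uses when it drops the all-null b4 column from bool_test.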