diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index f41030bf3cbcc..673925865c06f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -37,7 +37,7 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
     val msg1 = intercept[AnalysisException] {
       sql("select 'value1' as a, 1L as b").as[StringIntClass]
     }.message
-    assert(msg1 ==
+    assert(msg1 ===
       s"""
          |Cannot up cast b from bigint to int.
          |The type path of the target object is:
@@ -51,7 +51,7 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
         " named_struct('a', 'value1', 'b', cast(1.0 as decimal(38,18))) as b")
         .as[ComplexClass]
     }.message
-    assert(msg2 ==
+    assert(msg2 ===
       s"""
          |Cannot up cast b.`b` from decimal(38,18) to bigint.
          |The type path of the target object is:
@@ -72,9 +72,8 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
       Dataset.ofRows(spark, plan)
     }.message
 
-    assert(msg.contains("The feature is not supported: " +
+    assert(msg.matches("The feature is not supported: " +
       "UpCast only support DecimalType as AbstractDataType yet," +
-      " but got: org.apache.spark.sql.types.NumericType"))
+      """ but got: org.apache.spark.sql.types.NumericType\$\@\w+"""))
   }
-
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 4b2564034344a..d241f6c3b768e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -50,9 +50,9 @@ class QueryExecutionErrorsSuite extends QueryTest with SharedSparkSession {
       }.getCause.asInstanceOf[SparkRuntimeException]
       assert(e.getErrorClass === "INVALID_PARAMETER_VALUE")
       assert(e.getSqlState === "22023")
-      assert(e.getMessage.contains(
-        "The value of parameter(s) 'key' in the aes_encrypt/aes_decrypt function is invalid: " +
-        "expects a binary value with 16, 24 or 32 bytes, but got"))
+      assert(e.getMessage.matches(
+        "The value of parameter\\(s\\) 'key' in the aes_encrypt/aes_decrypt function is invalid: " +
+        "expects a binary value with 16, 24 or 32 bytes, but got \\d+ bytes."))
     }
 
     // Encryption failure - invalid key length
@@ -84,9 +84,11 @@ class QueryExecutionErrorsSuite extends QueryTest with SharedSparkSession {
       }.getCause.asInstanceOf[SparkRuntimeException]
       assert(e.getErrorClass === "INVALID_PARAMETER_VALUE")
       assert(e.getSqlState === "22023")
-      assert(e.getMessage.contains(
+      assert(e.getMessage ===
         "The value of parameter(s) 'expr, key' in the aes_encrypt/aes_decrypt function " +
-        "is invalid: Detail message:"))
+        "is invalid: Detail message: " +
+        "Given final block not properly padded. " +
+        "Such issues can arise if a bad key is used during decryption.")
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 03117b9608d0f..466852dae7022 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -32,7 +32,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
     }
     assert(e.getErrorClass === errorClass)
     assert(e.getSqlState === sqlState)
-    assert(e.getMessage.contains(message))
+    assert(e.getMessage === message)
   }
 
   test("UNSUPPORTED_FEATURE: LATERAL join with NATURAL join not supported") {
@@ -40,7 +40,14 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlText = "SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)",
       errorClass = "UNSUPPORTED_FEATURE",
       sqlState = "0A000",
-      message = "The feature is not supported: LATERAL join with NATURAL join.")
+      message =
+        """
+          |The feature is not supported: LATERAL join with NATURAL join.(line 1, pos 14)
+          |
+          |== SQL ==
+          |SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)
+          |--------------^^^
+          |""".stripMargin)
   }
 
   test("UNSUPPORTED_FEATURE: LATERAL join with USING join not supported") {
@@ -48,11 +55,19 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlText = "SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)",
       errorClass = "UNSUPPORTED_FEATURE",
       sqlState = "0A000",
-      message = "The feature is not supported: LATERAL join with USING join.")
+      message =
+        """
+          |The feature is not supported: LATERAL join with USING join.(line 1, pos 14)
+          |
+          |== SQL ==
+          |SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)
+          |--------------^^^
+          |""".stripMargin)
   }
 
   test("UNSUPPORTED_FEATURE: Unsupported LATERAL join type") {
-    Seq(("RIGHT OUTER", "RightOuter"),
+    Seq(
+      ("RIGHT OUTER", "RightOuter"),
       ("FULL OUTER", "FullOuter"),
       ("LEFT SEMI", "LeftSemi"),
       ("LEFT ANTI", "LeftAnti")).foreach { pair =>
@@ -60,22 +75,38 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
         sqlText = s"SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3",
         errorClass = "UNSUPPORTED_FEATURE",
         sqlState = "0A000",
-        message = s"The feature is not supported: LATERAL join type '${pair._2}'.")
+        message =
+          s"""
+            |The feature is not supported: LATERAL join type '${pair._2}'.(line 1, pos 14)
+            |
+            |== SQL ==
+            |SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3
+            |--------------^^^
+            |""".stripMargin)
     }
   }
 
   test("SPARK-35789: INVALID_SQL_SYNTAX - LATERAL can only be used with subquery") {
-    Seq("SELECT * FROM t1, LATERAL t2",
-      "SELECT * FROM t1 JOIN LATERAL t2",
-      "SELECT * FROM t1, LATERAL (t2 JOIN t3)",
-      "SELECT * FROM t1, LATERAL (LATERAL t2)",
-      "SELECT * FROM t1, LATERAL VALUES (0, 1)",
-      "SELECT * FROM t1, LATERAL RANGE(0, 1)").foreach { sqlText =>
+    Seq(
+      "SELECT * FROM t1, LATERAL t2" -> 26,
+      "SELECT * FROM t1 JOIN LATERAL t2" -> 30,
+      "SELECT * FROM t1, LATERAL (t2 JOIN t3)" -> 26,
+      "SELECT * FROM t1, LATERAL (LATERAL t2)" -> 26,
+      "SELECT * FROM t1, LATERAL VALUES (0, 1)" -> 26,
+      "SELECT * FROM t1, LATERAL RANGE(0, 1)" -> 26
+    ).foreach { case (sqlText, pos) =>
       validateParsingError(
         sqlText = sqlText,
         errorClass = "INVALID_SQL_SYNTAX",
         sqlState = "42000",
-        message = "Invalid SQL syntax: LATERAL can only be used with subquery.")
+        message =
+          s"""
+            |Invalid SQL syntax: LATERAL can only be used with subquery.(line 1, pos $pos)
+            |
+            |== SQL ==
+            |$sqlText
+            |${"-" * pos}^^^
+            |""".stripMargin)
     }
   }
 
@@ -84,6 +115,13 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlText = "SELECT * FROM a NATURAL CROSS JOIN b",
       errorClass = "UNSUPPORTED_FEATURE",
       sqlState = "0A000",
-      message = "The feature is not supported: NATURAL CROSS JOIN.")
+      message =
+        """
+          |The feature is not supported: NATURAL CROSS JOIN.(line 1, pos 14)
+          |
+          |== SQL ==
+          |SELECT * FROM a NATURAL CROSS JOIN b
+          |--------------^^^
+          |""".stripMargin)
   }
 }