diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index a2c72d7173c48..4b27dfc00c2aa 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -137,7 +137,7 @@ class AnalysisException protected[sql] (
     cause: Option[Throwable] = this.cause,
     errorClass: Option[String] = this.errorClass,
     messageParameters: Map[String, String] = this.messageParameters,
-    context: Array[QueryContext] = Array.empty): AnalysisException =
+    context: Array[QueryContext] = this.context): AnalysisException =
     new AnalysisException(message, line, startPosition, plan, cause, errorClass,
       errorSubClass, messageParameters, context)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index b4d927adecb67..6fc9d756c998d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -209,8 +209,7 @@ class Analyzer(override val catalogManager: CatalogManager)
       analyzed
     } catch {
       case e: AnalysisException =>
-        val ae = e.copy(plan = Option(analyzed),
-          context = analyzed.origin.getQueryContext)
+        val ae = e.copy(plan = Option(analyzed))
         ae.setStackTrace(e.getStackTrace)
         throw ae
     }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 9cc649878044f..fe601ed66b8fd 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -296,9 +296,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 30,
-    "fragment" : "select '2' / interval 2 second"
+    "fragment" : "'2' / interval 2 second"
   } ]
 }

@@ -320,9 +320,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 28,
-    "fragment" : "select '2' / interval 2 year"
+    "fragment" : "'2' / interval 2 year"
   } ]
 }

@@ -430,9 +430,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 28,
-    "fragment" : "select 2 / interval '2' year"
+    "fragment" : "2 / interval '2' year"
   } ]
 }

@@ -454,9 +454,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 28,
-    "fragment" : "select 2 / interval '2' hour"
+    "fragment" : "2 / interval '2' hour"
   } ]
 }

@@ -478,9 +478,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 31,
-    "fragment" : "select null / interval '2' year"
+    "fragment" : "null / interval '2' year"
   } ]
 }

@@ -502,9 +502,9 @@ org.apache.spark.sql.AnalysisException
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
-    "startIndex" : 1,
+    "startIndex" : 8,
     "stopIndex" : 31,
-    "fragment" : "select null / interval '2' hour"
+    "fragment" : "null / interval '2' hour"
   } ]
 }

@@ -1680,9 +1680,9 @@ org.apache.spark.sql.AnalysisException
"queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 388, - "fragment" : "select\n interval '2' year + '3-3 year to month',\n interval '2' year - '3 month',\n '3-2 year to month' + interval '2-2' year to month,\n '3 year' - interval '2-2' year to month,\n interval '99 11:22:33.123456789' day to second + '12:12 hour to second',\n interval '99 11:22:33.123456789' day to second - '12 hour',\n '4 day' + interval '10' day,\n '4 22 day to hour' - interval '10' day" + "startIndex" : 10, + "stopIndex" : 48, + "fragment" : "interval '2' year + '3-3 year to month'" } ] } @@ -1720,9 +1720,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 32, - "fragment" : "select interval '2' year + '3-3'" + "fragment" : "interval '2' year + '3-3'" } ] } @@ -1744,9 +1744,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "select interval '2' year - '4'" + "fragment" : "interval '2' year - '4'" } ] } @@ -1826,9 +1826,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "select interval '2' year + str from interval_view" + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval '2' year + str" } ] } @@ -1850,9 +1850,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "select interval '2' year - str from interval_view" + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval '2' year - str" } ] } @@ -1951,9 +1951,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 54, - "fragment" : "select interval '3' day - interval '2-2' year to month" + "fragment" : "interval '3' day - interval '2-2' year to month" } ] } @@ -1984,9 +1984,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "select 1 + interval '2' month" + "fragment" : "1 + interval '2' month" } ] } @@ -2017,9 +2017,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "select interval '2' month - 1" + "fragment" : "interval '2' month - 1" } ] } @@ -2888,9 +2888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 42, - "fragment" : "SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS" + "fragment" : "INTERVAL 1 MONTH > INTERVAL 20 DAYS" } ] } @@ -2912,9 +2912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY < '1'" + "fragment" : "INTERVAL '1' DAY < '1'" } ] } @@ -2936,9 +2936,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY = '1'" + "fragment" : "INTERVAL '1' DAY = '1'" } ] } @@ -2960,9 +2960,9 @@ org.apache.spark.sql.AnalysisException 
"queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY > '1'" + "fragment" : "INTERVAL '1' DAY > '1'" } ] } @@ -2984,9 +2984,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' < INTERVAL '1' DAY" + "fragment" : "'1' < INTERVAL '1' DAY" } ] } @@ -3008,9 +3008,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' = INTERVAL '1' DAY" + "fragment" : "'1' = INTERVAL '1' DAY" } ] } @@ -3032,9 +3032,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' > INTERVAL '1' DAY" + "fragment" : "'1' > INTERVAL '1' DAY" } ] } @@ -3056,9 +3056,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR < '1'" + "fragment" : "INTERVAL '1' YEAR < '1'" } ] } @@ -3080,9 +3080,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR = '1'" + "fragment" : "INTERVAL '1' YEAR = '1'" } ] } @@ -3104,9 +3104,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR > '1'" + "fragment" : "INTERVAL '1' YEAR > '1'" } ] } @@ -3128,9 +3128,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' < INTERVAL '1' YEAR" + "fragment" : "'1' < INTERVAL '1' YEAR" } ] } @@ -3152,9 +3152,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' = INTERVAL '1' YEAR" + "fragment" : "'1' = INTERVAL '1' YEAR" } ] } @@ -3176,9 +3176,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' > INTERVAL '1' YEAR" + "fragment" : "'1' > INTERVAL '1' YEAR" } ] } @@ -3298,9 +3298,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 49, - "fragment" : "SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)" + "fragment" : "div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out index 7969ccf1162d2..bd2bed273180d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out @@ -730,9 +730,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select timestamp'2011-11-11 11:11:11' + '1'" 
+ "fragment" : "timestamp'2011-11-11 11:11:11' + '1'" } ] } @@ -754,9 +754,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select '1' + timestamp'2011-11-11 11:11:11'" + "fragment" : "'1' + timestamp'2011-11-11 11:11:11'" } ] } @@ -778,9 +778,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select timestamp'2011-11-11 11:11:11' + null" + "fragment" : "timestamp'2011-11-11 11:11:11' + null" } ] } @@ -802,9 +802,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select null + timestamp'2011-11-11 11:11:11'" + "fragment" : "null + timestamp'2011-11-11 11:11:11'" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out index 3b25d183d5d65..253637eecf4ae 100644 --- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out @@ -169,9 +169,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 34, - "fragment" : "SELECT db1.t1.i1 FROM t1, mydb2.t1" + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "db1.t1.i1" } ] } @@ -209,9 +209,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 23, - "fragment" : "SELECT mydb1.t1 FROM t1" + "startIndex" : 8, + "stopIndex" : 15, + "fragment" : "mydb1.t1" } ] } @@ -242,9 +242,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 23, - "fragment" : "SELECT t1 FROM mydb1.t1" + "startIndex" : 8, + "stopIndex" : 9, + "fragment" : "t1" } ] } @@ -274,9 +274,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 26, - "fragment" : "SELECT mydb1.t1.i1 FROM t1" + "startIndex" : 8, + "stopIndex" : 18, + "fragment" : "mydb1.t1.i1" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index decb0e6ac632c..6091cb9b0f917 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -1504,9 +1504,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select timestamp'2011-11-11 11:11:11' + '1'" + "fragment" : "timestamp'2011-11-11 11:11:11' + '1'" } ] } @@ -1528,9 +1528,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select '1' + timestamp'2011-11-11 11:11:11'" + "fragment" : "'1' + timestamp'2011-11-11 11:11:11'" } ] } @@ -1552,9 +1552,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, 
"stopIndex" : 44, - "fragment" : "select timestamp'2011-11-11 11:11:11' + null" + "fragment" : "timestamp'2011-11-11 11:11:11' + null" } ] } @@ -1576,9 +1576,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select null + timestamp'2011-11-11 11:11:11'" + "fragment" : "null + timestamp'2011-11-11 11:11:11'" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out index f7b1a90ad4e10..b458ea6a2bf70 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out @@ -233,14 +233,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 56, - "stopIndex" : 65, - "fragment" : "GROUP BY b" - } ] + } } @@ -726,14 +719,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 65, - "stopIndex" : 78, - "fragment" : "GROUP BY a + 1" - } ] + } } diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index ad4106ff48997..3505c97bd4649 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -48,14 +48,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 34, - "stopIndex" : 43, - "fragment" : "GROUP BY b" - } ] + } } @@ -125,14 +118,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 38, - "stopIndex" : 51, - "fragment" : "GROUP BY a + 1" - } ] + } } @@ -201,9 +187,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 50, - "stopIndex" : 59, - "fragment" : "GROUP BY k" + "startIndex" : 22, + "stopIndex" : 33, + "fragment" : "non_existing" } ] } @@ -237,14 +223,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"k\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 59, - "stopIndex" : 68, - "fragment" : "GROUP BY a" - } ] + } } @@ -273,9 +252,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 39, + "startIndex" : 48, "stopIndex" : 48, - "fragment" : "GROUP BY k" + "fragment" : "k" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out index c024386da0c70..83b721373dae6 100644 --- a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out @@ -171,14 +171,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"c1\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - 
"startIndex" : 48, - "stopIndex" : 74, - "fragment" : "GROUP BY GROUPING SETS (())" - } ] + } } diff --git a/sql/core/src/test/resources/sql-tests/results/having.sql.out b/sql/core/src/test/resources/sql-tests/results/having.sql.out index 4f59e111069af..098f7bb3e257d 100644 --- a/sql/core/src/test/resources/sql-tests/results/having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/having.sql.out @@ -46,9 +46,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 44, "stopIndex" : 55, - "fragment" : "SELECT count(k) FROM hav GROUP BY v HAVING v = array(1)" + "fragment" : "v = array(1)" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 29516de73149e..e75ceb3ff7d3b 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -194,9 +194,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "select '2' / interval 2 second" + "fragment" : "'2' / interval 2 second" } ] } @@ -218,9 +218,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 28, - "fragment" : "select '2' / interval 2 year" + "fragment" : "'2' / interval 2 year" } ] } @@ -328,9 +328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 28, - "fragment" : "select 2 / interval '2' year" + "fragment" : "2 / interval '2' year" } ] } @@ -352,9 +352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 28, - "fragment" : "select 2 / interval '2' hour" + "fragment" : "2 / interval '2' hour" } ] } @@ -376,9 +376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 31, - "fragment" : "select null / interval '2' year" + "fragment" : "null / interval '2' year" } ] } @@ -400,9 +400,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 31, - "fragment" : "select null / interval '2' hour" + "fragment" : "null / interval '2' hour" } ] } @@ -1561,9 +1561,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 388, - "fragment" : "select\n interval '2' year + '3-3 year to month',\n interval '2' year - '3 month',\n '3-2 year to month' + interval '2-2' year to month,\n '3 year' - interval '2-2' year to month,\n interval '99 11:22:33.123456789' day to second + '12:12 hour to second',\n interval '99 11:22:33.123456789' day to second - '12 hour',\n '4 day' + interval '10' day,\n '4 22 day to hour' - interval '10' day" + "startIndex" : 10, + "stopIndex" : 48, + "fragment" : "interval '2' year + '3-3 year to month'" } ] } @@ -1601,9 +1601,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 32, - "fragment" : "select interval '2' year + '3-3'" + "fragment" : 
"interval '2' year + '3-3'" } ] } @@ -1625,9 +1625,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "select interval '2' year - '4'" + "fragment" : "interval '2' year - '4'" } ] } @@ -1673,9 +1673,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "select interval '2' year + str from interval_view" + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval '2' year + str" } ] } @@ -1697,9 +1697,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "select interval '2' year - str from interval_view" + "startIndex" : 8, + "stopIndex" : 30, + "fragment" : "interval '2' year - str" } ] } @@ -1764,9 +1764,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 54, - "fragment" : "select interval '3' day - interval '2-2' year to month" + "fragment" : "interval '3' day - interval '2-2' year to month" } ] } @@ -1797,9 +1797,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "select 1 + interval '2' month" + "fragment" : "1 + interval '2' month" } ] } @@ -1830,9 +1830,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "select interval '2' month - 1" + "fragment" : "interval '2' month - 1" } ] } @@ -2701,9 +2701,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 42, - "fragment" : "SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS" + "fragment" : "INTERVAL 1 MONTH > INTERVAL 20 DAYS" } ] } @@ -2725,9 +2725,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY < '1'" + "fragment" : "INTERVAL '1' DAY < '1'" } ] } @@ -2749,9 +2749,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY = '1'" + "fragment" : "INTERVAL '1' DAY = '1'" } ] } @@ -2773,9 +2773,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT INTERVAL '1' DAY > '1'" + "fragment" : "INTERVAL '1' DAY > '1'" } ] } @@ -2797,9 +2797,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' < INTERVAL '1' DAY" + "fragment" : "'1' < INTERVAL '1' DAY" } ] } @@ -2821,9 +2821,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' = INTERVAL '1' DAY" + "fragment" : "'1' = INTERVAL '1' DAY" } ] } @@ -2845,9 +2845,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", 
"objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 29, - "fragment" : "SELECT '1' > INTERVAL '1' DAY" + "fragment" : "'1' > INTERVAL '1' DAY" } ] } @@ -2869,9 +2869,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR < '1'" + "fragment" : "INTERVAL '1' YEAR < '1'" } ] } @@ -2893,9 +2893,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR = '1'" + "fragment" : "INTERVAL '1' YEAR = '1'" } ] } @@ -2917,9 +2917,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT INTERVAL '1' YEAR > '1'" + "fragment" : "INTERVAL '1' YEAR > '1'" } ] } @@ -2941,9 +2941,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' < INTERVAL '1' YEAR" + "fragment" : "'1' < INTERVAL '1' YEAR" } ] } @@ -2965,9 +2965,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' = INTERVAL '1' YEAR" + "fragment" : "'1' = INTERVAL '1' YEAR" } ] } @@ -2989,9 +2989,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 30, - "fragment" : "SELECT '1' > INTERVAL '1' YEAR" + "fragment" : "'1' > INTERVAL '1' YEAR" } ] } @@ -3111,9 +3111,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 49, - "fragment" : "SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)" + "fragment" : "div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out index f68d18175f923..df0b7dbad14cc 100644 --- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out @@ -288,9 +288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT * FROM t1 JOIN LATERAL (SELECT t1.c1 AS a, t2.c1 AS b) s JOIN t2 ON s.b = t2.c1" + "startIndex" : 51, + "stopIndex" : 55, + "fragment" : "t2.c1" } ] } @@ -420,9 +420,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT * FROM t1, LATERAL (SELECT * FROM t2, LATERAL (SELECT t1.c1 + t2.c1))" + "startIndex" : 62, + "stopIndex" : 66, + "fragment" : "t1.c1" } ] } @@ -443,9 +443,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 74, - "fragment" : "SELECT * FROM t1, LATERAL (SELECT * FROM (SELECT c1), LATERAL (SELECT c2))" + "startIndex" : 71, + "stopIndex" : 72, + "fragment" : "c2" } ] } @@ -485,9 +485,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - 
"startIndex" : 1, - "stopIndex" : 80, - "fragment" : "SELECT * FROM t1, LATERAL (SELECT c1, (SELECT SUM(c2) FROM t2 WHERE c1 = t1.c1))" + "startIndex" : 74, + "stopIndex" : 78, + "fragment" : "t1.c1" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out index bfcf63a318697..4d8847b6d0d6c 100644 --- a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out @@ -240,9 +240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT nt2.k FROM (SELECT * FROM nt1 natural join nt2)" + "startIndex" : 8, + "stopIndex" : 12, + "fragment" : "nt2.k" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out index 5eb70900d465a..d5f260d3acea2 100644 --- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out @@ -240,9 +240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 62, "stopIndex" : 113, - "fragment" : "SELECT * FROM (\n SELECT course, earnings FROM courseSales\n)\nPIVOT (\n sum(earnings)\n FOR year IN (2012, 2013)\n)" + "fragment" : "PIVOT (\n sum(earnings)\n FOR year IN (2012, 2013)\n)" } ] } @@ -357,9 +357,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 75, - "fragment" : "SELECT * FROM courseSales\nPIVOT (\n sum(earnings)\n FOR year IN (s, 2013)\n)" + "startIndex" : 66, + "stopIndex" : 66, + "fragment" : "s" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out index f4a3688348e2e..ecbb871fdd2d3 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out @@ -506,8 +506,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 95, - "fragment" : "select\n (select max((select i.unique2 from tenk1 i where i.unique1 = o.unique1)))\nfrom tenk1 o" + "startIndex" : 71, + "stopIndex" : 79, + "fragment" : "o.unique1" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out index 26e05a444d25f..fcdd42551d1d3 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out @@ -79,9 +79,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 109, - "fragment" : "CREATE VIEW key_dependent_view_no_cols AS\n SELECT FROM view_base_table GROUP BY key HAVING length(data) > 0" + "startIndex" : 53, + "stopIndex" : 56, + "fragment" : "FROM" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out index de2a939c8a549..df9b4ec209535 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out @@ -3256,9 +3256,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 78, "stopIndex" : 81, - "fragment" : "select * from\n int8_tbl x join (int4_tbl x cross join int4_tbl y) j on q1 = y.f1" + "fragment" : "y.f1" } ] } @@ -3290,9 +3290,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "select t1.uunique1 from\n tenk1 t1 join tenk2 t2 on t1.two = t2.two" + "startIndex" : 8, + "stopIndex" : 18, + "fragment" : "t1.uunique1" } ] } @@ -3315,9 +3315,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "select t2.uunique1 from\n tenk1 t1 join tenk2 t2 on t1.two = t2.two" + "startIndex" : 8, + "stopIndex" : 18, + "fragment" : "t2.uunique1" } ] } @@ -3340,9 +3340,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 64, - "fragment" : "select uunique1 from\n tenk1 t1 join tenk2 t2 on t1.two = t2.two" + "startIndex" : 8, + "stopIndex" : 15, + "fragment" : "uunique1" } ] } @@ -3554,9 +3554,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 48, - "fragment" : "select f1,g from int4_tbl a, (select f1 as g) ss" + "startIndex" : 38, + "stopIndex" : 39, + "fragment" : "f1" } ] } @@ -3577,9 +3577,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 50, - "fragment" : "select f1,g from int4_tbl a, (select a.f1 as g) ss" + "startIndex" : 38, + "stopIndex" : 41, + "fragment" : "a.f1" } ] } @@ -3600,9 +3600,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 58, - "fragment" : "select f1,g from int4_tbl a cross join (select f1 as g) ss" + "startIndex" : 48, + "stopIndex" : 49, + "fragment" : "f1" } ] } @@ -3623,9 +3623,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "select f1,g from int4_tbl a cross join (select a.f1 as g) ss" + "startIndex" : 48, + "stopIndex" : 51, + "fragment" : "a.f1" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out index d717c5f7e677b..2e9a827ce5555 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out @@ -160,9 +160,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 45, - "fragment" : "SELECT 1 AS one FROM test_having HAVING a > 1" + "startIndex" : 41, + "stopIndex" : 41, + "fragment" : "a" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out index b5d1df99934c7..e6fb22e8d4967 100755 --- 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out @@ -130,9 +130,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 53, + "startIndex" : 62, "stopIndex" : 62, - "fragment" : "ORDER BY b" + "fragment" : "b" } ] } @@ -364,9 +364,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 53, + "startIndex" : 62, "stopIndex" : 62, - "fragment" : "ORDER BY b" + "fragment" : "b" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out index 75c3fc1f53b16..354c6b26cea44 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out @@ -592,9 +592,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 56, - "stopIndex" : 74, - "fragment" : "ORDER BY q2 LIMIT 1" + "startIndex" : 65, + "stopIndex" : 66, + "fragment" : "q2" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out index 03b8f59ab8b7c..df14c281a5a91 100644 --- a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out @@ -44,9 +44,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 43, - "fragment" : "SELECT `(a)?+.+` FROM testData2 WHERE a = 1" + "startIndex" : 8, + "stopIndex" : 16, + "fragment" : "`(a)?+.+`" } ] } @@ -68,9 +68,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 47, - "fragment" : "SELECT t.`(a)?+.+` FROM testData2 t WHERE a = 1" + "startIndex" : 8, + "stopIndex" : 18, + "fragment" : "t.`(a)?+.+`" } ] } @@ -92,9 +92,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 41, - "fragment" : "SELECT `(a|b)` FROM testData2 WHERE a = 2" + "startIndex" : 8, + "stopIndex" : 14, + "fragment" : "`(a|b)`" } ] } @@ -116,9 +116,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 45, - "fragment" : "SELECT `(a|b)?+.+` FROM testData2 WHERE a = 2" + "startIndex" : 8, + "stopIndex" : 18, + "fragment" : "`(a|b)?+.+`" } ] } @@ -140,9 +140,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 38, - "fragment" : "SELECT SUM(`(a|b)?+.+`) FROM testData2" + "startIndex" : 12, + "stopIndex" : 22, + "fragment" : "`(a|b)?+.+`" } ] } @@ -164,9 +164,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 32, - "fragment" : "SELECT SUM(`(a)`) FROM testData2" + "startIndex" : 12, + "stopIndex" : 16, + "fragment" : "`(a)`" } ] } @@ -399,9 +399,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 30, + "startIndex" : 39, "stopIndex" : 43, - "fragment" : "GROUP BY `(a)`" 
+ "fragment" : "`(a)`" } ] } @@ -423,8 +423,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 30, + "startIndex" : 39, "stopIndex" : 47, - "fragment" : "GROUP BY `(a)?+.+`" + "fragment" : "`(a)?+.+`" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out index 68dfc470edc62..3193e3a023637 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out @@ -145,8 +145,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 87, - "fragment" : "SELECT t1.t1a\nFROM t1\nJOIN t1_copy\nON EXISTS (SELECT 1 FROM t2 WHERE t2a > t1a)" + "startIndex" : 84, + "stopIndex" : 86, + "fragment" : "t1a" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out index cd54af2ae9967..b38d742b03f93 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out @@ -68,9 +68,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT a AS col1, b AS col2 FROM testData AS t(c, d)" + "startIndex" : 8, + "stopIndex" : 8, + "fragment" : "a" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 3a8a9684c66c4..6cf116a49e0a3 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -676,9 +676,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select timestamp'2011-11-11 11:11:11' + '1'" + "fragment" : "timestamp'2011-11-11 11:11:11' + '1'" } ] } @@ -700,9 +700,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select '1' + timestamp'2011-11-11 11:11:11'" + "fragment" : "'1' + timestamp'2011-11-11 11:11:11'" } ] } @@ -724,9 +724,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select timestamp'2011-11-11 11:11:11' + null" + "fragment" : "timestamp'2011-11-11 11:11:11' + null" } ] } @@ -748,9 +748,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select null + timestamp'2011-11-11 11:11:11'" + "fragment" : "null + timestamp'2011-11-11 11:11:11'" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 446687e63aea5..b494b62bdba6b 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -745,9 +745,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select timestamp'2011-11-11 11:11:11' + '1'" + "fragment" : "timestamp'2011-11-11 11:11:11' + '1'" } ] } @@ -769,9 +769,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select '1' + timestamp'2011-11-11 11:11:11'" + "fragment" : "'1' + timestamp'2011-11-11 11:11:11'" } ] } @@ -793,9 +793,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select timestamp'2011-11-11 11:11:11' + null" + "fragment" : "timestamp'2011-11-11 11:11:11' + null" } ] } @@ -817,9 +817,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select null + timestamp'2011-11-11 11:11:11'" + "fragment" : "null + timestamp'2011-11-11 11:11:11'" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 0661df238e37d..e99fa0d882e1f 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -676,9 +676,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select timestamp'2011-11-11 11:11:11' + '1'" + "fragment" : "timestamp'2011-11-11 11:11:11' + '1'" } ] } @@ -700,9 +700,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 43, - "fragment" : "select '1' + timestamp'2011-11-11 11:11:11'" + "fragment" : "'1' + timestamp'2011-11-11 11:11:11'" } ] } @@ -724,9 +724,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select timestamp'2011-11-11 11:11:11' + null" + "fragment" : "timestamp'2011-11-11 11:11:11' + null" } ] } @@ -748,9 +748,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, + "startIndex" : 8, "stopIndex" : 44, - "fragment" : "select null + timestamp'2011-11-11 11:11:11'" + "fragment" : "null + timestamp'2011-11-11 11:11:11'" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index a4a3bedf59887..d93351dae77fe 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -88,9 +88,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 40, - "fragment" : "SELECT true = cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "true = 
cast('1' as binary)" } ] } @@ -120,9 +120,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT true = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "true = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -144,9 +144,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 56, - "fragment" : "SELECT true = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "true = cast('2017-12-11 09:30:00' as date)" } ] } @@ -232,9 +232,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 42, - "fragment" : "SELECT true <=> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "true <=> cast('1' as binary)" } ] } @@ -264,9 +264,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "true <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -288,9 +288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 58, - "fragment" : "SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "true <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -376,9 +376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 40, - "fragment" : "SELECT cast('1' as binary) = true FROM t" + "startIndex" : 8, + "stopIndex" : 33, + "fragment" : "cast('1' as binary) = true" } ] } @@ -408,9 +408,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) = true FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = true" } ] } @@ -432,9 +432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 56, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = true FROM t" + "startIndex" : 8, + "stopIndex" : 49, + "fragment" : "cast('2017-12-11 09:30:00' as date) = true" } ] } @@ -520,9 +520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 42, - "fragment" : "SELECT cast('1' as binary) <=> true FROM t" + "startIndex" : 8, + "stopIndex" : 35, + "fragment" : "cast('1' as binary) <=> true" } ] } @@ -552,9 +552,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> true FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> true" } ] } @@ -576,9 +576,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 58, - 
"fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> true FROM t" + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> true" } ] } @@ -664,9 +664,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 41, - "fragment" : "SELECT false = cast('0' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "false = cast('0' as binary)" } ] } @@ -696,9 +696,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 64, - "fragment" : "SELECT false = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "false = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -720,9 +720,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 57, - "fragment" : "SELECT false = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 50, + "fragment" : "false = cast('2017-12-11 09:30:00' as date)" } ] } @@ -808,9 +808,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 43, - "fragment" : "SELECT false <=> cast('0' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "false <=> cast('0' as binary)" } ] } @@ -840,9 +840,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "false <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -864,9 +864,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 59, - "fragment" : "SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 52, + "fragment" : "false <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -952,9 +952,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 41, - "fragment" : "SELECT cast('0' as binary) = false FROM t" + "startIndex" : 8, + "stopIndex" : 34, + "fragment" : "cast('0' as binary) = false" } ] } @@ -984,9 +984,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 64, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) = false FROM t" + "startIndex" : 8, + "stopIndex" : 57, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = false" } ] } @@ -1008,9 +1008,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 57, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = false FROM t" + "startIndex" : 8, + "stopIndex" : 50, + "fragment" : "cast('2017-12-11 09:30:00' as date) = false" } ] } @@ -1096,9 +1096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 43, - "fragment" : "SELECT cast('0' as binary) <=> false FROM t" + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "cast('0' as binary) <=> false" } ] } @@ -1128,9 +1128,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> false FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> false" } ] } @@ -1152,8 +1152,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 59, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> false FROM t" + "startIndex" : 8, + "stopIndex" : 52, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> false" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index 0f485014ef3f7..387a85099528b 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -248,9 +248,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) + cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) + cast(1 as decimal(3, 0))" } ] } @@ -272,9 +272,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) + cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) + cast(1 as decimal(5, 0))" } ] } @@ -296,9 +296,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) + cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) + cast(1 as decimal(10, 0))" } ] } @@ -320,9 +320,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) + cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) + cast(1 as decimal(20, 0))" } ] } @@ -344,9 +344,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0))" } ] } @@ -368,9 +368,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0))" } ] } @@ -392,9 +392,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0)) FROM t" + 
"startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0))" } ] } @@ -416,9 +416,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(20, 0))" } ] } @@ -732,9 +732,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) + cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) + cast('1' as binary)" } ] } @@ -756,9 +756,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) + cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) + cast('1' as binary)" } ] } @@ -780,9 +780,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) + cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) + cast('1' as binary)" } ] } @@ -804,9 +804,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) + cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) + cast('1' as binary)" } ] } @@ -828,9 +828,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) + cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) + cast(1 as boolean)" } ] } @@ -852,9 +852,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) + cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) + cast(1 as boolean)" } ] } @@ -876,9 +876,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) + cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) + cast(1 as boolean)" } ] } @@ -900,9 +900,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) + cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) + cast(1 as boolean)" } ] } @@ -924,9 +924,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + 
"startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -948,9 +948,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -972,9 +972,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -996,9 +996,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -1280,9 +1280,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) - cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) - cast(1 as decimal(3, 0))" } ] } @@ -1304,9 +1304,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) - cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) - cast(1 as decimal(5, 0))" } ] } @@ -1328,9 +1328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) - cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) - cast(1 as decimal(10, 0))" } ] } @@ -1352,9 +1352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) - cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) - cast(1 as decimal(20, 0))" } ] } @@ -1704,9 +1704,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) - cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) - cast('1' as binary)" } ] } @@ -1728,9 +1728,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) - cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) - cast('1' as binary)" } ] } @@ -1752,9 +1752,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - 
"stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) - cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) - cast('1' as binary)" } ] } @@ -1776,9 +1776,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) - cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) - cast('1' as binary)" } ] } @@ -1800,9 +1800,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) - cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) - cast(1 as boolean)" } ] } @@ -1824,9 +1824,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) - cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) - cast(1 as boolean)" } ] } @@ -1848,9 +1848,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) - cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) - cast(1 as boolean)" } ] } @@ -1872,9 +1872,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) - cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) - cast(1 as boolean)" } ] } @@ -2192,9 +2192,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) * cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) * cast(1 as decimal(3, 0))" } ] } @@ -2216,9 +2216,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) * cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) * cast(1 as decimal(5, 0))" } ] } @@ -2240,9 +2240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) * cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) * cast(1 as decimal(10, 0))" } ] } @@ -2264,9 +2264,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) * cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) * cast(1 as decimal(20, 0))" } ] } @@ -2288,9 +2288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017*12*11 09:30:00.0' 
as timestamp) * cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(3, 0))" } ] } @@ -2312,9 +2312,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0))" } ] } @@ -2336,9 +2336,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0))" } ] } @@ -2360,9 +2360,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0))" } ] } @@ -2384,9 +2384,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0))" } ] } @@ -2408,9 +2408,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0))" } ] } @@ -2432,9 +2432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(10, 0))" } ] } @@ -2456,9 +2456,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(20, 0))" } ] } @@ -2736,9 +2736,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) * cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) * cast('1' as binary)" } ] } @@ -2760,9 +2760,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) * cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 
0)) * cast('1' as binary)" } ] } @@ -2784,9 +2784,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) * cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) * cast('1' as binary)" } ] } @@ -2808,9 +2808,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) * cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) * cast('1' as binary)" } ] } @@ -2832,9 +2832,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) * cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) * cast(1 as boolean)" } ] } @@ -2856,9 +2856,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) * cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) * cast(1 as boolean)" } ] } @@ -2880,9 +2880,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) * cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) * cast(1 as boolean)" } ] } @@ -2904,9 +2904,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) * cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) * cast(1 as boolean)" } ] } @@ -2928,9 +2928,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp)" } ] } @@ -2952,9 +2952,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp)" } ] } @@ -2976,9 +2976,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp)" } ] } @@ -3000,9 +3000,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as 
timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as timestamp)" } ] } @@ -3024,9 +3024,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date)" } ] } @@ -3048,9 +3048,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date)" } ] } @@ -3072,9 +3072,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date)" } ] } @@ -3096,9 +3096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date)" } ] } @@ -3344,9 +3344,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) / cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) / cast(1 as decimal(3, 0))" } ] } @@ -3368,9 +3368,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) / cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) / cast(1 as decimal(5, 0))" } ] } @@ -3392,9 +3392,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) / cast(1 as decimal(10, 0))" } ] } @@ -3416,9 +3416,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) / cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) / cast(1 as decimal(20, 0))" } ] } @@ -3440,9 +3440,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0))" } ] } @@ -3464,9 +3464,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { 
"objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0))" } ] } @@ -3488,9 +3488,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0))" } ] } @@ -3512,9 +3512,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0))" } ] } @@ -3536,9 +3536,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0))" } ] } @@ -3560,9 +3560,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0))" } ] } @@ -3584,9 +3584,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0))" } ] } @@ -3608,9 +3608,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0))" } ] } @@ -3888,9 +3888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) / cast('1' as binary)" } ] } @@ -3912,9 +3912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) / cast('1' as binary)" } ] } @@ -3936,9 +3936,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) 
FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) / cast('1' as binary)" } ] } @@ -3960,9 +3960,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) / cast('1' as binary)" } ] } @@ -3984,9 +3984,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) / cast(1 as boolean)" } ] } @@ -4008,9 +4008,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) / cast(1 as boolean)" } ] } @@ -4032,9 +4032,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) / cast(1 as boolean)" } ] } @@ -4056,9 +4056,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) / cast(1 as boolean)" } ] } @@ -4080,9 +4080,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp)" } ] } @@ -4104,9 +4104,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp)" } ] } @@ -4128,9 +4128,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp)" } ] } @@ -4152,9 +4152,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp)" } ] } @@ -4176,9 +4176,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : 
"", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00' as date)" } ] } @@ -4200,9 +4200,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date)" } ] } @@ -4224,9 +4224,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date)" } ] } @@ -4248,9 +4248,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date)" } ] } @@ -4496,9 +4496,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) % cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) % cast(1 as decimal(3, 0))" } ] } @@ -4520,9 +4520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) % cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) % cast(1 as decimal(5, 0))" } ] } @@ -4544,9 +4544,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) % cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) % cast(1 as decimal(10, 0))" } ] } @@ -4568,9 +4568,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) % cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) % cast(1 as decimal(20, 0))" } ] } @@ -4592,9 +4592,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0))" } ] } @@ -4616,9 +4616,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as 
timestamp) % cast(1 as decimal(5, 0))" } ] } @@ -4640,9 +4640,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0))" } ] } @@ -4664,9 +4664,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0))" } ] } @@ -4688,9 +4688,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0))" } ] } @@ -4712,9 +4712,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0))" } ] } @@ -4736,9 +4736,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0))" } ] } @@ -4760,9 +4760,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0))" } ] } @@ -5040,9 +5040,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) % cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) % cast('1' as binary)" } ] } @@ -5064,9 +5064,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) % cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) % cast('1' as binary)" } ] } @@ -5088,9 +5088,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) % cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) % cast('1' as binary)" } ] } @@ -5112,9 +5112,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - 
"stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) % cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) % cast('1' as binary)" } ] } @@ -5136,9 +5136,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) % cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) % cast(1 as boolean)" } ] } @@ -5160,9 +5160,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) % cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) % cast(1 as boolean)" } ] } @@ -5184,9 +5184,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) % cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) % cast(1 as boolean)" } ] } @@ -5208,9 +5208,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) % cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) % cast(1 as boolean)" } ] } @@ -5232,9 +5232,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -5256,9 +5256,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -5280,9 +5280,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -5304,9 +5304,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -5328,9 +5328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date)" } ] 
} @@ -5352,9 +5352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date)" } ] } @@ -5376,9 +5376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as date)" } ] } @@ -5400,9 +5400,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date)" } ] } @@ -5648,9 +5648,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast('1' as binary), cast(1 as decimal(3, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast('1' as binary), cast(1 as decimal(3, 0)))" } ] } @@ -5672,9 +5672,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast('1' as binary), cast(1 as decimal(5, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast('1' as binary), cast(1 as decimal(5, 0)))" } ] } @@ -5696,9 +5696,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast('1' as binary), cast(1 as decimal(10, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "pmod(cast('1' as binary), cast(1 as decimal(10, 0)))" } ] } @@ -5720,9 +5720,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast('1' as binary), cast(1 as decimal(20, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "pmod(cast('1' as binary), cast(1 as decimal(20, 0)))" } ] } @@ -5744,9 +5744,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 88, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 81, + "fragment" : "pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0)))" } ] } @@ -5768,9 +5768,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 88, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 81, + "fragment" : "pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0)))" } ] } @@ -5792,9 +5792,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - 
"fragment" : "SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0)))" } ] } @@ -5816,9 +5816,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0)))" } ] } @@ -5840,9 +5840,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 81, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 74, + "fragment" : "pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0)))" } ] } @@ -5864,9 +5864,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 81, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 74, + "fragment" : "pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0)))" } ] } @@ -5888,9 +5888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0)))" } ] } @@ -5912,9 +5912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0))) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0)))" } ] } @@ -6192,9 +6192,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast(1 as decimal(3, 0)) , cast('1' as binary)) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "pmod(cast(1 as decimal(3, 0)) , cast('1' as binary))" } ] } @@ -6216,9 +6216,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast(1 as decimal(5, 0)) , cast('1' as binary)) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "pmod(cast(1 as decimal(5, 0)) , cast('1' as binary))" } ] } @@ -6240,9 +6240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast(1 as decimal(10, 0)), cast('1' as binary)) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "pmod(cast(1 as decimal(10, 0)), cast('1' as binary))" } ] } @@ -6264,9 +6264,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT pmod(cast(1 as decimal(20, 0)), cast('1' as binary)) FROM t" + "startIndex" : 
8, + "stopIndex" : 59, + "fragment" : "pmod(cast(1 as decimal(20, 0)), cast('1' as binary))" } ] } @@ -6288,9 +6288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean)) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean))" } ] } @@ -6312,9 +6312,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean)) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean))" } ] } @@ -6336,9 +6336,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast(1 as decimal(10, 0)), cast(1 as boolean)) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast(1 as decimal(10, 0)), cast(1 as boolean))" } ] } @@ -6360,9 +6360,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 65, - "fragment" : "SELECT pmod(cast(1 as decimal(20, 0)), cast(1 as boolean)) FROM t" + "startIndex" : 8, + "stopIndex" : 58, + "fragment" : "pmod(cast(1 as decimal(20, 0)), cast(1 as boolean))" } ] } @@ -6384,9 +6384,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - "fragment" : "SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp)) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp))" } ] } @@ -6408,9 +6408,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - "fragment" : "SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp)) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp))" } ] } @@ -6432,9 +6432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - "fragment" : "SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp))" } ] } @@ -6456,9 +6456,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 89, - "fragment" : "SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp)) FROM t" + "startIndex" : 8, + "stopIndex" : 82, + "fragment" : "pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp))" } ] } @@ -6480,9 +6480,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date)) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date))" } ] } @@ 
-6504,9 +6504,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date)) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date))" } ] } @@ -6528,9 +6528,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date))" } ] } @@ -6552,9 +6552,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date)) FROM t" + "startIndex" : 8, + "stopIndex" : 75, + "fragment" : "pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date))" } ] } @@ -6800,9 +6800,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) = cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) = cast(1 as decimal(3, 0))" } ] } @@ -6824,9 +6824,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) = cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) = cast(1 as decimal(5, 0))" } ] } @@ -6848,9 +6848,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) = cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) = cast(1 as decimal(10, 0))" } ] } @@ -6872,9 +6872,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) = cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) = cast(1 as decimal(20, 0))" } ] } @@ -6896,9 +6896,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0))" } ] } @@ -6920,9 +6920,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0))" } ] } @@ -6944,9 +6944,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT 
cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0))" } ] } @@ -6968,9 +6968,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0))" } ] } @@ -6992,9 +6992,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0))" } ] } @@ -7016,9 +7016,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0))" } ] } @@ -7040,9 +7040,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0))" } ] } @@ -7064,9 +7064,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0))" } ] } @@ -7344,9 +7344,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) = cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) = cast('1' as binary)" } ] } @@ -7368,9 +7368,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) = cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) = cast('1' as binary)" } ] } @@ -7392,9 +7392,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) = cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) = cast('1' as binary)" } ] } @@ -7416,9 +7416,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) = cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) = cast('1' as binary)" } ] } @@ -7472,9 +7472,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -7496,9 +7496,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -7520,9 +7520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -7544,9 +7544,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -7568,9 +7568,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date)" } ] } @@ -7592,9 +7592,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date)" } ] } @@ -7616,9 +7616,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date)" } ] } @@ -7640,9 +7640,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date)" } ] } @@ -7888,9 +7888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <=> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <=> cast(1 as decimal(3, 0))" } ] } @@ -7912,9 +7912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", 
- "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <=> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <=> cast(1 as decimal(5, 0))" } ] } @@ -7936,9 +7936,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast('1' as binary) <=> cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('1' as binary) <=> cast(1 as decimal(10, 0))" } ] } @@ -7960,9 +7960,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast('1' as binary) <=> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast('1' as binary) <=> cast(1 as decimal(20, 0))" } ] } @@ -7984,9 +7984,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0))" } ] } @@ -8008,9 +8008,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 0))" } ] } @@ -8032,9 +8032,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0))" } ] } @@ -8056,9 +8056,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0))" } ] } @@ -8080,9 +8080,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0))" } ] } @@ -8104,9 +8104,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0))" } ] } @@ -8128,9 +8128,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0)) 
FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0))" } ] } @@ -8152,9 +8152,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0))" } ] } @@ -8432,9 +8432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <=> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast(1 as decimal(3, 0)) <=> cast('1' as binary)" } ] } @@ -8456,9 +8456,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <=> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast(1 as decimal(5, 0)) <=> cast('1' as binary)" } ] } @@ -8480,9 +8480,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <=> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast(1 as decimal(10, 0)) <=> cast('1' as binary)" } ] } @@ -8504,9 +8504,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 63, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <=> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 56, + "fragment" : "cast(1 as decimal(20, 0)) <=> cast('1' as binary)" } ] } @@ -8560,9 +8560,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -8584,9 +8584,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -8608,9 +8608,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -8632,9 +8632,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 86, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 79, + "fragment" : "cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -8656,9 +8656,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -8680,9 +8680,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -8704,9 +8704,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -8728,9 +8728,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 72, + "fragment" : "cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date)" } ] } @@ -8976,9 +8976,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) < cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) < cast(1 as decimal(3, 0))" } ] } @@ -9000,9 +9000,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) < cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) < cast(1 as decimal(5, 0))" } ] } @@ -9024,9 +9024,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) < cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) < cast(1 as decimal(10, 0))" } ] } @@ -9048,9 +9048,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) < cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) < cast(1 as decimal(20, 0))" } ] } @@ -9072,9 +9072,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0))" } ] } @@ -9096,9 +9096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as 
decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(5, 0))" } ] } @@ -9120,9 +9120,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0))" } ] } @@ -9144,9 +9144,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0))" } ] } @@ -9168,9 +9168,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0))" } ] } @@ -9192,9 +9192,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0))" } ] } @@ -9216,9 +9216,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0))" } ] } @@ -9240,9 +9240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0))" } ] } @@ -9520,9 +9520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) < cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) < cast('1' as binary)" } ] } @@ -9544,9 +9544,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) < cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) < cast('1' as binary)" } ] } @@ -9568,9 +9568,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) < cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) < cast('1' as binary)" } ] } @@ -9592,9 +9592,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) < cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) < cast('1' as binary)" } ] } @@ -9616,9 +9616,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) < cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) < cast(1 as boolean)" } ] } @@ -9640,9 +9640,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) < cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) < cast(1 as boolean)" } ] } @@ -9664,9 +9664,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) < cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) < cast(1 as boolean)" } ] } @@ -9688,9 +9688,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) < cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) < cast(1 as boolean)" } ] } @@ -9712,9 +9712,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -9736,9 +9736,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -9760,9 +9760,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -9784,9 +9784,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -9808,9 +9808,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date) FROM t" + 
"startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date)" } ] } @@ -9832,9 +9832,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date)" } ] } @@ -9856,9 +9856,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date)" } ] } @@ -9880,9 +9880,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date)" } ] } @@ -10128,9 +10128,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) <= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) <= cast(1 as decimal(3, 0))" } ] } @@ -10152,9 +10152,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) <= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) <= cast(1 as decimal(5, 0))" } ] } @@ -10176,9 +10176,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <= cast(1 as decimal(10, 0))" } ] } @@ -10200,9 +10200,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <= cast(1 as decimal(20, 0))" } ] } @@ -10224,9 +10224,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0))" } ] } @@ -10248,9 +10248,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0))" } ] } @@ -10272,9 +10272,9 @@ org.apache.spark.sql.AnalysisException "queryContext" 
: [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0))" } ] } @@ -10296,9 +10296,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0))" } ] } @@ -10320,9 +10320,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0))" } ] } @@ -10344,9 +10344,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0))" } ] } @@ -10368,9 +10368,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0))" } ] } @@ -10392,9 +10392,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0))" } ] } @@ -10672,9 +10672,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(3, 0)) <= cast('1' as binary)" } ] } @@ -10696,9 +10696,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(5, 0)) <= cast('1' as binary)" } ] } @@ -10720,9 +10720,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(10, 0)) <= cast('1' as binary)" } ] } @@ -10744,9 +10744,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <= cast('1' as binary) 
FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(20, 0)) <= cast('1' as binary)" } ] } @@ -10768,9 +10768,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) <= cast(1 as boolean)" } ] } @@ -10792,9 +10792,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) <= cast(1 as boolean)" } ] } @@ -10816,9 +10816,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) <= cast(1 as boolean)" } ] } @@ -10840,9 +10840,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) <= cast(1 as boolean)" } ] } @@ -10864,9 +10864,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -10888,9 +10888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -10912,9 +10912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -10936,9 +10936,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -10960,9 +10960,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date)" } ] } @@ -10984,9 +10984,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date)" } ] }
@@ -11008,9 +11008,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date)" } ] }
@@ -11032,9 +11032,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date)" } ] }
@@ -11280,9 +11280,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) > cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) > cast(1 as decimal(3, 0))" } ] }
@@ -11304,9 +11304,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast('1' as binary) > cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast('1' as binary) > cast(1 as decimal(5, 0))" } ] }
@@ -11328,9 +11328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) > cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) > cast(1 as decimal(10, 0))" } ] }
@@ -11352,9 +11352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) > cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) > cast(1 as decimal(20, 0))" } ] }
@@ -11376,9 +11376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0))" } ] }
@@ -11400,9 +11400,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 83, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 76, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0))" } ] }
@@ -11424,9 +11424,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0))" } ] }
@@ -11448,9 +11448,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0))" } ] }
@@ -11472,9 +11472,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0))" } ] }
@@ -11496,9 +11496,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0))" } ] }
@@ -11520,9 +11520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0))" } ] }
@@ -11544,9 +11544,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0))" } ] }
@@ -11824,9 +11824,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 0)) > cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) > cast('1' as binary)" } ] }
@@ -11848,9 +11848,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) > cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) > cast('1' as binary)" } ] }
@@ -11872,9 +11872,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) > cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) > cast('1' as binary)" } ] }
@@ -11896,9 +11896,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) > cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) > cast('1' as binary)" } ] }
@@ -11920,9 +11920,9 @@
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(3, 0)) > cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(3, 0)) > cast(1 as boolean)" } ] } @@ -11944,9 +11944,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(5, 0)) > cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(5, 0)) > cast(1 as boolean)" } ] } @@ -11968,9 +11968,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) > cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) > cast(1 as boolean)" } ] } @@ -11992,9 +11992,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(20, 0)) > cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(20, 0)) > cast(1 as boolean)" } ] } @@ -12016,9 +12016,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -12040,9 +12040,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -12064,9 +12064,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -12088,9 +12088,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -12112,9 +12112,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date)" } ] } @@ -12136,9 +12136,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(5, 0)) > 
cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00' as date)" } ] }
@@ -12160,9 +12160,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date)" } ] }
@@ -12184,9 +12184,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date)" } ] }
@@ -12432,9 +12432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) >= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) >= cast(1 as decimal(3, 0))" } ] }
@@ -12456,9 +12456,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) >= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) >= cast(1 as decimal(5, 0))" } ] }
@@ -12480,9 +12480,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) >= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) >= cast(1 as decimal(10, 0))" } ] }
@@ -12504,9 +12504,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) >= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) >= cast(1 as decimal(20, 0))" } ] }
@@ -12528,9 +12528,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0))" } ] }
@@ -12552,9 +12552,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0))" } ] }
@@ -12576,9 +12576,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0))" } ] }
@@ -12600,9 +12600,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0))" } ] }
@@ -12624,9 +12624,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0))" } ] }
@@ -12648,9 +12648,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0))" } ] }
@@ -12672,9 +12672,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0))" } ] }
@@ -12696,9 +12696,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0))" } ] }
@@ -12976,9 +12976,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(3, 0)) >= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(3, 0)) >= cast('1' as binary)" } ] }
@@ -13000,9 +13000,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(5, 0)) >= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(5, 0)) >= cast('1' as binary)" } ] }
@@ -13024,9 +13024,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(10, 0)) >= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(10, 0)) >= cast('1' as binary)" } ] }
@@ -13048,9 +13048,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(20, 0)) >= cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(20, 0)) >= cast('1' as binary)" } ] }
@@ -13072,9 +13072,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(3, 
0)) >= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(3, 0)) >= cast(1 as boolean)" } ] } @@ -13096,9 +13096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(5, 0)) >= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(5, 0)) >= cast(1 as boolean)" } ] } @@ -13120,9 +13120,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) >= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) >= cast(1 as boolean)" } ] } @@ -13144,9 +13144,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(20, 0)) >= cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(20, 0)) >= cast(1 as boolean)" } ] } @@ -13168,9 +13168,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -13192,9 +13192,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -13216,9 +13216,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -13240,9 +13240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -13264,9 +13264,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date)" } ] } @@ -13288,9 +13288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as 
date)" } ] } @@ -13312,9 +13312,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date)" } ] } @@ -13336,9 +13336,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date)" } ] } @@ -13584,9 +13584,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) <> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) <> cast(1 as decimal(3, 0))" } ] } @@ -13608,9 +13608,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) <> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) <> cast(1 as decimal(5, 0))" } ] } @@ -13632,9 +13632,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <> cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <> cast(1 as decimal(10, 0))" } ] } @@ -13656,9 +13656,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) <> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) <> cast(1 as decimal(20, 0))" } ] } @@ -13680,9 +13680,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0))" } ] } @@ -13704,9 +13704,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0))" } ] } @@ -13728,9 +13728,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0))" } ] } @@ -13752,9 +13752,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, 
- "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0))" } ] } @@ -13776,9 +13776,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0))" } ] } @@ -13800,9 +13800,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0))" } ] } @@ -13824,9 +13824,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0))" } ] } @@ -13848,9 +13848,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0))" } ] } @@ -14128,9 +14128,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(3, 0)) <> cast('1' as binary)" } ] } @@ -14152,9 +14152,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(5, 0)) <> cast('1' as binary)" } ] } @@ -14176,9 +14176,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(10, 0)) <> cast('1' as binary)" } ] } @@ -14200,9 +14200,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <> cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as decimal(20, 0)) <> cast('1' as binary)" } ] } @@ -14256,9 +14256,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(3, 0)) <> cast('2017-12-11 
09:30:00.0' as timestamp)" } ] } @@ -14280,9 +14280,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -14304,9 +14304,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -14328,9 +14328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 85, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 78, + "fragment" : "cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -14352,9 +14352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date)" } ] } @@ -14376,9 +14376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date)" } ] } @@ -14400,9 +14400,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00' as date)" } ] } @@ -14424,8 +14424,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date)" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out index e404130cb2510..934ae2b0cdcc9 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out @@ -88,9 +88,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT cast(1 as tinyint) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast(1 as tinyint) / cast('1' as binary)" } ] } @@ -112,9 +112,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as tinyint) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as tinyint) / cast(1 as boolean)" } ] } @@ -136,9 +136,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -160,9 +160,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 63, + "fragment" : "cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -248,9 +248,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 55, - "fragment" : "SELECT cast(1 as smallint) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "cast(1 as smallint) / cast('1' as binary)" } ] } @@ -272,9 +272,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT cast(1 as smallint) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast(1 as smallint) / cast(1 as boolean)" } ] } @@ -296,9 +296,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -320,9 +320,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "cast(1 as smallint) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -408,9 +408,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 50, - "fragment" : "SELECT cast(1 as int) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "cast(1 as int) / cast('1' as binary)" } ] } @@ -432,9 +432,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "SELECT cast(1 as int) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "cast(1 as int) / cast(1 as boolean)" } ] } @@ -456,9 +456,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 73, - "fragment" : "SELECT cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 66, + "fragment" : "cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -480,9 +480,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT cast(1 as int) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 59, + "fragment" : "cast(1 as int) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -568,9 +568,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as bigint) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as bigint) / cast('1' as binary)" } ] } @@ -592,9 +592,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as bigint) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as bigint) / cast(1 as boolean)" } ] } @@ -616,9 +616,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -640,9 +640,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast(1 as bigint) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -728,9 +728,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as float) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as float) / cast('1' as binary)" } ] } @@ -752,9 +752,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 51, - "fragment" : "SELECT cast(1 as float) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 44, + "fragment" : "cast(1 as float) / cast(1 as boolean)" } ] } @@ -776,9 +776,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 75, - "fragment" : "SELECT cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 68, + "fragment" : "cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -800,9 +800,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 68, - "fragment" : "SELECT cast(1 as float) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "cast(1 as float) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -888,9 +888,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as double) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as double) / cast('1' as binary)" } ] } @@ -912,9 +912,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { 
"objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as double) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as double) / cast(1 as boolean)" } ] } @@ -936,9 +936,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -960,9 +960,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast(1 as double) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast(1 as double) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -1048,9 +1048,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast(1 as decimal(10, 0)) / cast('1' as binary)" } ] } @@ -1072,9 +1072,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as decimal(10, 0)) / cast(1 as boolean)" } ] } @@ -1096,9 +1096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -1120,9 +1120,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -1208,9 +1208,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as string) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as string) / cast('1' as binary)" } ] } @@ -1232,9 +1232,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as string) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as string) / cast(1 as boolean)" } ] } @@ -1256,9 +1256,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -1280,9 +1280,9 @@ 
org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast(1 as string) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast(1 as string) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -1304,9 +1304,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT cast('1' as binary) / cast(1 as tinyint) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast('1' as binary) / cast(1 as tinyint)" } ] } @@ -1328,9 +1328,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 55, - "fragment" : "SELECT cast('1' as binary) / cast(1 as smallint) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "cast('1' as binary) / cast(1 as smallint)" } ] } @@ -1352,9 +1352,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 50, - "fragment" : "SELECT cast('1' as binary) / cast(1 as int) FROM t" + "startIndex" : 8, + "stopIndex" : 43, + "fragment" : "cast('1' as binary) / cast(1 as int)" } ] } @@ -1376,9 +1376,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast('1' as binary) / cast(1 as bigint) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast('1' as binary) / cast(1 as bigint)" } ] } @@ -1400,9 +1400,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast('1' as binary) / cast(1 as float) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast('1' as binary) / cast(1 as float)" } ] } @@ -1424,9 +1424,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast('1' as binary) / cast(1 as double) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast('1' as binary) / cast(1 as double)" } ] } @@ -1448,9 +1448,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 61, - "fragment" : "SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 54, + "fragment" : "cast('1' as binary) / cast(1 as decimal(10, 0))" } ] } @@ -1472,9 +1472,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast('1' as binary) / cast(1 as string) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast('1' as binary) / cast(1 as string)" } ] } @@ -1496,9 +1496,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 55, - "fragment" : "SELECT cast('1' as binary) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "cast('1' as binary) / cast('1' as binary)" } ] } @@ -1520,9 +1520,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT 
cast('1' as binary) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast('1' as binary) / cast(1 as boolean)" } ] } @@ -1544,9 +1544,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -1568,9 +1568,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast('1' as binary) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "cast('1' as binary) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -1592,9 +1592,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as tinyint) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as boolean) / cast(1 as tinyint)" } ] } @@ -1616,9 +1616,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as smallint) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast(1 as boolean) / cast(1 as smallint)" } ] } @@ -1640,9 +1640,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 49, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as int) FROM t" + "startIndex" : 8, + "stopIndex" : 42, + "fragment" : "cast(1 as boolean) / cast(1 as int)" } ] } @@ -1664,9 +1664,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as bigint) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as boolean) / cast(1 as bigint)" } ] } @@ -1688,9 +1688,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 51, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as float) FROM t" + "startIndex" : 8, + "stopIndex" : 44, + "fragment" : "cast(1 as boolean) / cast(1 as float)" } ] } @@ -1712,9 +1712,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as double) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as boolean) / cast(1 as double)" } ] } @@ -1736,9 +1736,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 60, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "cast(1 as boolean) / cast(1 as decimal(10, 0))" } ] } @@ -1760,9 +1760,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 52, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as string) FROM t" + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "cast(1 as boolean) / 
cast(1 as string)" } ] } @@ -1784,9 +1784,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 54, - "fragment" : "SELECT cast(1 as boolean) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 47, + "fragment" : "cast(1 as boolean) / cast('1' as binary)" } ] } @@ -1808,9 +1808,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 53, - "fragment" : "SELECT cast(1 as boolean) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 46, + "fragment" : "cast(1 as boolean) / cast(1 as boolean)" } ] } @@ -1832,9 +1832,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -1856,9 +1856,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 63, + "fragment" : "cast(1 as boolean) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -1880,9 +1880,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint)" } ] } @@ -1904,9 +1904,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint)" } ] } @@ -1928,9 +1928,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 73, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int) FROM t" + "startIndex" : 8, + "stopIndex" : 66, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int)" } ] } @@ -1952,9 +1952,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint)" } ] } @@ -1976,9 +1976,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 75, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float) FROM t" + "startIndex" : 8, + "stopIndex" : 68, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float)" } ] } @@ -2000,9 +2000,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' 
as timestamp) / cast(1 as double) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as double)" } ] } @@ -2024,9 +2024,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 84, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 77, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0))" } ] } @@ -2048,9 +2048,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 76, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t" + "startIndex" : 8, + "stopIndex" : 69, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string)" } ] } @@ -2072,9 +2072,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 78, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 71, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast('1' as binary)" } ] } @@ -2096,9 +2096,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean)" } ] } @@ -2120,9 +2120,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 101, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 94, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -2144,9 +2144,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 94, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 87, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' as date)" } ] } @@ -2168,9 +2168,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint) FROM t" + "startIndex" : 8, + "stopIndex" : 63, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint)" } ] } @@ -2192,9 +2192,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as smallint) FROM t" + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as smallint)" } ] } @@ -2216,9 +2216,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 66, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as int) FROM t" + "startIndex" 
: 8, + "stopIndex" : 59, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as int)" } ] } @@ -2240,9 +2240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as bigint) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as bigint)" } ] } @@ -2264,9 +2264,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 68, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as float) FROM t" + "startIndex" : 8, + "stopIndex" : 61, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as float)" } ] } @@ -2288,9 +2288,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as double) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as double)" } ] } @@ -2312,9 +2312,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t" + "startIndex" : 8, + "stopIndex" : 70, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0))" } ] } @@ -2336,9 +2336,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 69, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t" + "startIndex" : 8, + "stopIndex" : 62, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as string)" } ] } @@ -2360,9 +2360,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 71, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 64, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast('1' as binary)" } ] } @@ -2384,9 +2384,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 70, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 63, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast(1 as boolean)" } ] } @@ -2408,9 +2408,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 94, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 87, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -2432,8 +2432,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 87, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 80, + "fragment" : "cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date)" } ] } diff --git 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 53a7269322a5a..7f953de75bf9c 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -88,9 +88,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' + cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "'1' + cast('1' as binary)" } ] } @@ -112,9 +112,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' + cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "'1' + cast(1 as boolean)" } ] } @@ -136,9 +136,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' + cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -233,9 +233,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' - cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "'1' - cast('1' as binary)" } ] } @@ -257,9 +257,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' - cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "'1' - cast(1 as boolean)" } ] } @@ -362,9 +362,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' * cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "'1' * cast('1' as binary)" } ] } @@ -386,9 +386,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' * cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "'1' * cast(1 as boolean)" } ] } @@ -410,9 +410,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' * cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -434,9 +434,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' * cast('2017-12-11 09:30:00' as date)" } ] } @@ -522,9 +522,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' / cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "'1' / cast('1' as 
binary)" } ] } @@ -546,9 +546,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' / cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "'1' / cast(1 as boolean)" } ] } @@ -570,9 +570,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' / cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -594,9 +594,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' / cast('2017-12-11 09:30:00' as date)" } ] } @@ -682,9 +682,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' % cast('1' as binary) FROM t" + "startIndex" : 8, + "stopIndex" : 32, + "fragment" : "'1' % cast('1' as binary)" } ] } @@ -706,9 +706,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' % cast(1 as boolean) FROM t" + "startIndex" : 8, + "stopIndex" : 31, + "fragment" : "'1' % cast(1 as boolean)" } ] } @@ -730,9 +730,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "'1' % cast('2017-12-11 09:30:00.0' as timestamp)" } ] } @@ -754,9 +754,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t" + "startIndex" : 8, + "stopIndex" : 48, + "fragment" : "'1' % cast('2017-12-11 09:30:00' as date)" } ] } @@ -842,9 +842,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod('1', cast('1' as binary)) FROM t" + "startIndex" : 8, + "stopIndex" : 37, + "fragment" : "pmod('1', cast('1' as binary))" } ] } @@ -866,9 +866,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod('1', cast(1 as boolean)) FROM t" + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "pmod('1', cast(1 as boolean))" } ] } @@ -890,9 +890,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t" + "startIndex" : 8, + "stopIndex" : 60, + "fragment" : "pmod('1', cast('2017-12-11 09:30:00.0' as timestamp))" } ] } @@ -914,9 +914,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t" + "startIndex" : 8, + "stopIndex" : 
53, + "fragment" : "pmod('1', cast('2017-12-11 09:30:00' as date))" } ] } @@ -994,9 +994,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) + '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) + '1'" } ] } @@ -1018,9 +1018,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as boolean) + '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as boolean) + '1'" } ] } @@ -1042,9 +1042,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) + '1'" } ] } @@ -1131,9 +1131,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) - '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) - '1'" } ] } @@ -1155,9 +1155,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as boolean) - '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as boolean) - '1'" } ] } @@ -1253,9 +1253,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) * '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) * '1'" } ] } @@ -1277,9 +1277,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as boolean) * '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as boolean) * '1'" } ] } @@ -1301,9 +1301,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) * '1'" } ] } @@ -1325,9 +1325,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00' as date) * '1'" } ] } @@ -1405,9 +1405,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) / '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) / '1'" } ] } @@ -1429,9 +1429,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as boolean) / '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as boolean) / '1'" } ] } 
@@ -1453,9 +1453,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) / '1'" } ] } @@ -1477,9 +1477,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00' as date) / '1'" } ] } @@ -1557,9 +1557,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('1' as binary) % '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('1' as binary) % '1'" } ] } @@ -1581,9 +1581,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast(1 as boolean) % '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast(1 as boolean) % '1'" } ] } @@ -1605,9 +1605,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00.0' as timestamp) % '1'" } ] } @@ -1629,9 +1629,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 62, - "fragment" : "SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t" + "startIndex" : 8, + "stopIndex" : 55, + "fragment" : "cast('2017-12-11 09:30:00' as date) % '1'" } ] } @@ -1709,9 +1709,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod(cast('1' as binary), '1') FROM t" + "startIndex" : 8, + "stopIndex" : 37, + "fragment" : "pmod(cast('1' as binary), '1')" } ] } @@ -1733,9 +1733,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod(cast(1 as boolean), '1') FROM t" + "startIndex" : 8, + "stopIndex" : 36, + "fragment" : "pmod(cast(1 as boolean), '1')" } ] } @@ -1757,9 +1757,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t" + "startIndex" : 8, + "stopIndex" : 60, + "fragment" : "pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1')" } ] } @@ -1781,9 +1781,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 67, - "fragment" : "SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t" + "startIndex" : 8, + "stopIndex" : 53, + "fragment" : "pmod(cast('2017-12-11 09:30:00' as date), '1')" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out index 
4cc2ca9fdecf0..ace8d483a985d 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out @@ -497,8 +497,8 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 100, - "fragment" : "select\n (select udf(max((select i.unique2 from tenk1 i where i.unique1 = o.unique1))))\nfrom tenk1 o" + "startIndex" : 75, + "stopIndex" : 83, + "fragment" : "o.unique1" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out index 45f1a631b4597..de415c9971874 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out @@ -3284,9 +3284,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 91, - "fragment" : "select * from\n int8_tbl x join (int4_tbl x cross join int4_tbl y) j on udf(q1) = udf(y.f1)" + "startIndex" : 87, + "stopIndex" : 90, + "fragment" : "y.f1" } ] } @@ -3318,9 +3318,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 77, - "fragment" : "select udf(t1.uunique1) from\n tenk1 t1 join tenk2 t2 on t1.two = udf(t2.two)" + "startIndex" : 12, + "stopIndex" : 22, + "fragment" : "t1.uunique1" } ] } @@ -3343,9 +3343,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 82, - "fragment" : "select udf(udf(t2.uunique1)) from\n tenk1 t1 join tenk2 t2 on udf(t1.two) = t2.two" + "startIndex" : 16, + "stopIndex" : 26, + "fragment" : "t2.uunique1" } ] } @@ -3368,9 +3368,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 79, - "fragment" : "select udf(uunique1) from\n tenk1 t1 join tenk2 t2 on udf(t1.two) = udf(t2.two)" + "startIndex" : 12, + "stopIndex" : 19, + "fragment" : "uunique1" } ] } @@ -3582,9 +3582,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 68, - "fragment" : "select udf(udf(f1,g)) from int4_tbl a, (select udf(udf(f1)) as g) ss" + "startIndex" : 56, + "stopIndex" : 57, + "fragment" : "f1" } ] } @@ -3605,9 +3605,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 55, - "fragment" : "select udf(f1,g) from int4_tbl a, (select a.f1 as g) ss" + "startIndex" : 43, + "stopIndex" : 46, + "fragment" : "a.f1" } ] } @@ -3628,9 +3628,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 73, - "fragment" : "select udf(udf(f1,g)) from int4_tbl a cross join (select udf(f1) as g) ss" + "startIndex" : 62, + "stopIndex" : 63, + "fragment" : "f1" } ] } @@ -3651,9 +3651,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 75, - "fragment" : "select udf(f1,g) from int4_tbl a cross join (select udf(udf(a.f1)) as g) ss" + "startIndex" : 61, + "stopIndex" : 64, + "fragment" : "a.f1" } ] } 
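For readers checking the regenerated positions above: these .sql.out golden files record 1-based, inclusive offsets, which is why a fragment covering a whole statement used to carry "startIndex" : 1, while a fragment beginning right after "SELECT " now carries "startIndex" : 8. A minimal sketch of the arithmetic, assuming the fragment occurs exactly once in the statement (the helper itself is hypothetical, not part of this patch):

```scala
// Hypothetical helper: recompute the 1-based, inclusive positions that the
// regenerated .sql.out golden files record for a narrowed error fragment.
def goldenContext(sql: String, fragment: String): (Int, Int) = {
  val i = sql.indexOf(fragment) // 0-based offset; assumes a unique match
  require(i >= 0, s"fragment not found in: $sql")
  (i + 1, i + fragment.length)  // JSON "startIndex" / "stopIndex"
}

// goldenContext("SELECT '1' + cast('1' as binary) FROM t",
//               "'1' + cast('1' as binary)")
// == (8, 32), matching the promoteStrings.sql.out hunk above.
```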
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out index d6c39b73df994..4bda849709df5 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out @@ -160,9 +160,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 50, - "fragment" : "SELECT 1 AS one FROM test_having HAVING udf(a) > 1" + "startIndex" : 45, + "stopIndex" : 45, + "fragment" : "a" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out index 804f7287437b2..f448206594897 100755 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out @@ -133,9 +133,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 63, - "stopIndex" : 77, - "fragment" : "ORDER BY udf(b)" + "startIndex" : 76, + "stopIndex" : 76, + "fragment" : "b" } ] } @@ -367,9 +367,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 68, - "stopIndex" : 82, - "fragment" : "ORDER BY udf(b)" + "startIndex" : 81, + "stopIndex" : 81, + "fragment" : "b" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index 6e5f9c4ff9946..969ad69c27996 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -48,14 +48,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 49, - "stopIndex" : 58, - "fragment" : "GROUP BY b" - } ] + } } @@ -125,14 +118,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"a\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 48, - "stopIndex" : 61, - "fragment" : "GROUP BY a + 1" - } ] + } } @@ -213,14 +199,7 @@ org.apache.spark.sql.AnalysisException "sqlState" : "42000", "messageParameters" : { "expression" : "\"k\"" - }, - "queryContext" : [ { - "objectType" : "", - "objectName" : "", - "startIndex" : 69, - "stopIndex" : 83, - "fragment" : "GROUP BY udf(a)" - } ] + } } @@ -249,9 +228,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 49, + "startIndex" : 58, "stopIndex" : 58, - "fragment" : "GROUP BY k" + "fragment" : "k" } ] } diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out index dad37991f78af..1cd1db599ad06 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out @@ -240,9 +240,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" 
: 1, + "startIndex" : 62, "stopIndex" : 118, - "fragment" : "SELECT * FROM (\n SELECT course, earnings FROM courseSales\n)\nPIVOT (\n udf(sum(earnings))\n FOR year IN (2012, 2013)\n)" + "fragment" : "PIVOT (\n udf(sum(earnings))\n FOR year IN (2012, 2013)\n)" } ] } @@ -357,9 +357,9 @@ org.apache.spark.sql.AnalysisException "queryContext" : [ { "objectType" : "", "objectName" : "", - "startIndex" : 1, - "stopIndex" : 80, - "fragment" : "SELECT * FROM courseSales\nPIVOT (\n udf(sum(earnings))\n FOR year IN (s, 2013)\n)" + "startIndex" : 71, + "stopIndex" : 71, + "fragment" : "s" } ] } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala index efe7cb8576496..59382d220f5b8 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala @@ -2537,8 +2537,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("transform(a, x -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 10, + stop = 10)) } test("map_filter") { @@ -2610,8 +2615,14 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("map_filter(a, (k, v) -> k > v)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 11, + stop = 11) + ) } test("filter function - array for primitive type not containing null") { @@ -2771,8 +2782,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("filter(a, x -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 7, + stop = 7)) } test("exists function - array for primitive type not containing null") { @@ -2905,8 +2921,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("exists(a, x -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 7, + stop = 7)) } test("forall function - array for primitive type not containing null") { @@ -3053,8 +3074,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("forall(a, x -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = 
Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 7, + stop = 7)) checkError( exception = @@ -3239,8 +3265,13 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("aggregate(a, 0, (acc, x) -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`i`, `s`"), + context = ExpectedContext( + fragment = "a", + start = 10, + stop = 10)) } test("map_zip_with function - map of primitive types") { @@ -3795,8 +3826,14 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { exception = intercept[AnalysisException](df.selectExpr("zip_with(a1, a, (acc, x) -> x)")), errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), - parameters = Map("objectName" -> "`a`", "proposal" -> "`a1`, `a2`, `i`")) + errorSubClass = "WITH_SUGGESTION", + sqlState = None, + parameters = Map("objectName" -> "`a`", "proposal" -> "`a1`, `a2`, `i`"), + context = ExpectedContext( + fragment = "a", + start = 13, + stop = 13) + ) } private def assertValuesDoNotChangeAfterCoalesceOrUnion(v: Column): Unit = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala index d7ea766b21b63..5be6d53f6e10c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala @@ -327,20 +327,30 @@ class DatasetSuite extends QueryTest ds.select(expr("`(_1)?+.+`").as[Int]) }, errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), + errorSubClass = "WITH_SUGGESTION", + sqlState = None, parameters = Map( "objectName" -> "`(_1)?+.+`", - "proposal" -> "`_1`, `_2`")) + "proposal" -> "`_1`, `_2`"), + context = ExpectedContext( + fragment = "`(_1)?+.+`", + start = 0, + stop = 9)) checkError( exception = intercept[AnalysisException] { ds.select(expr("`(_1|_2)`").as[Int]) }, errorClass = "UNRESOLVED_COLUMN", - errorSubClass = Some("WITH_SUGGESTION"), + errorSubClass = "WITH_SUGGESTION", + sqlState = None, parameters = Map( "objectName" -> "`(_1|_2)`", - "proposal" -> "`_1`, `_2`")) + "proposal" -> "`_1`, `_2`"), + context = ExpectedContext( + fragment = "`(_1|_2)`", + start = 0, + stop = 8)) var e = intercept[AnalysisException] { ds.select(ds("`(_1)?+.+`")) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index 96825c8003f4a..215b005319e29 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -2676,9 +2676,10 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark "left" -> "\"STRUCT\"", "right" -> "\"STRUCT\""), context = ExpectedContext( - fragment = query, - start = 0, - stop = 29)) + fragment = "c = C", + start = 25, + stop = 29 + )) } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala index 
6116b1aa6794b..2c2c3b86317e6 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala @@ -899,7 +899,9 @@ class SubquerySuite extends QueryTest "objectName" -> "`a`", "proposal" -> "`t`.`i`, `t`.`j`"), context = ExpectedContext( - fragment = query, start = 0, stop = 42)) + fragment = "a", + start = 37, + stop = 37)) } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala index 2897ef6c39a8c..00c774e2d1bee 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala @@ -417,7 +417,10 @@ class QueryCompilationErrorsSuite "proposal" -> "`__auto_generated_subquery_name`.`m`, `__auto_generated_subquery_name`.`aa`"), context = ExpectedContext( - fragment = query, start = 0, stop = 55)) + fragment = "a", + start = 9, + stop = 9) + ) } test("UNRESOLVED_COLUMN: SELECT distinct does not work correctly " + @@ -451,7 +454,9 @@ class QueryCompilationErrorsSuite "proposal" -> "`a`, `b`" ), context = ExpectedContext( - fragment = "order by struct.a, struct.b", start = 171, stop = 197) + fragment = "struct.a", + start = 180, + stop = 187) ) } @@ -470,7 +475,9 @@ class QueryCompilationErrorsSuite "objectName" -> "`v`.`i`", "proposal" -> "`__auto_generated_subquery_name`.`i`"), context = ExpectedContext( - fragment = query, start = 0, stop = 32)) + fragment = "v.i", + start = 7, + stop = 9)) checkAnswer(sql("SELECT __auto_generated_subquery_name.i from (SELECT i FROM v)"), Row(1)) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala index d6b2c8f48f5e5..091993c63499a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala @@ -112,7 +112,10 @@ class DescribeTableSuite extends command.DescribeTableSuiteBase "objectName" -> "`key1`", "proposal" -> "`test_catalog`.`ns`.`tbl`.`key`, `test_catalog`.`ns`.`tbl`.`col`"), context = ExpectedContext( - fragment = query, start = 0, stop = 28)) + fragment = query, + start = 0, + stop = query.length - 1) + ) } } @@ -141,7 +144,9 @@ class DescribeTableSuite extends command.DescribeTableSuiteBase "objectName" -> "`KEY`", "proposal" -> "`test_catalog`.`ns`.`tbl`.`key`"), context = ExpectedContext( - fragment = query, start = 0, stop = 27)) + fragment = query, + start = 0, + stop = query.length - 1)) } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala index b93bef4f28341..c3b3ba6a17477 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala @@ -2113,9 +2113,9 @@ class InsertSuite extends DataSourceTest with SharedSparkSession { "proposal" -> "`__auto_generated_subquery_name`.`c1`, `__auto_generated_subquery_name`.`c2`"), context = ExpectedContext( - fragment = insert, - start = 1, - stop = insert.length)) + fragment = "c3", + start = insert.length + 26, + stop = insert.length + 27)) } } diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala index 50c08d8807447..756abe297a61b 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala @@ -135,7 +135,9 @@ class HiveParquetSuite extends QueryTest "proposal" -> ("`__auto_generated_subquery_name`.`c1`, " + "`__auto_generated_subquery_name`.`c2`")), context = ExpectedContext( - fragment = query.trim, start = 1, stop = 118) + fragment = "c3", + start = 61, + stop = 62) ) } }
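Across the Scala suites, the patch converges on a single assertion shape: checkError with an ExpectedContext whose start/stop are 0-based, inclusive offsets into the SQL text (note the off-by-one relative to the 1-based "startIndex" in the golden files). A sketch of that shape, reusing values from the QueryCompilationErrorsSuite hunk above; the errorClass/errorSubClass arguments follow the pattern of the DataFrameFunctionsSuite changes rather than that exact hunk, and deriving the offsets via indexOf is illustrative only, assuming the fragment is unique in the statement:

```scala
// Sketch of the assertion pattern this patch converges on; offsets are
// 0-based and stop is inclusive. The indexOf derivation is illustrative.
val query = "SELECT v.i from (SELECT i FROM v)"
val fragment = "v.i"
val start = query.indexOf(fragment)    // 7
val stop = start + fragment.length - 1 // 9
checkError(
  exception = intercept[AnalysisException](sql(query)),
  errorClass = "UNRESOLVED_COLUMN",
  errorSubClass = "WITH_SUGGESTION",
  sqlState = None,
  parameters = Map(
    "objectName" -> "`v`.`i`",
    "proposal" -> "`__auto_generated_subquery_name`.`i`"),
  context = ExpectedContext(fragment = fragment, start = start, stop = stop))
```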