diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index 95d13b17ee418..ade20dbff83ff 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -389,6 +389,19 @@ abstract class SparkFunSuite
       context: QueryContext): Unit =
     checkError(exception, errorClass, Some(errorSubClass), None, parameters, false, Array(context))
 
+  case class ExpectedContext(
+      objectType: String,
+      objectName: String,
+      startIndex: Int,
+      stopIndex: Int,
+      fragment: String) extends QueryContext
+
+  object ExpectedContext {
+    def apply(fragment: String, start: Int, stop: Int): ExpectedContext = {
+      ExpectedContext("", "", start, stop, fragment)
+    }
+  }
+
   class LogAppender(msg: String = "", maxEvents: Int = 1000)
       extends AbstractAppender("logAppender", null, null, true, Property.EMPTY_ARRAY) {
     private val _loggingEvents = new ArrayBuffer[LogEvent]()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
index 7e0d89b0fd23b..e1f3cc060c86a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala
@@ -23,7 +23,6 @@ import org.apache.spark.TestUtils.{assertNotSpilled, assertSpilled}
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
 import org.apache.spark.sql.catalyst.optimizer.TransposeWindow
 import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.execution.exchange.{ENSURE_REQUIREMENTS, Exchange, ShuffleExchangeExec}
 import org.apache.spark.sql.execution.window.WindowExec
@@ -38,7 +37,6 @@ import org.apache.spark.sql.types._
  */
 class DataFrameWindowFunctionsSuite extends QueryTest
   with SharedSparkSession
-  with QueryErrorsSuiteBase
   with AdaptiveSparkPlanHelper {
   import testImplicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
index 5c14e9df09d1f..2bac51d159da8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetUnpivotSuite.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.functions.{length, struct, sum}
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
@@ -26,7 +25,6 @@ import org.apache.spark.sql.types._
  * Comprehensive tests for Dataset.unpivot.
  */
 class DatasetUnpivotSuite extends QueryTest
-  with QueryErrorsSuiteBase
   with SharedSparkSession {
   import testImplicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
index 7f812e7709efa..b7726fa1c9020 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.catalyst.expressions.Hex
 import org.apache.spark.sql.connector.catalog.InMemoryPartitionTableCatalog
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
 import org.apache.spark.unsafe.types.UTF8String
@@ -28,7 +27,7 @@ import org.apache.spark.unsafe.types.UTF8String
 
 /**
  * The base trait for SQL INSERT.
 */
-trait SQLInsertTestSuite extends QueryTest with SQLTestUtils with QueryErrorsSuiteBase {
+trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
 
   import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 4ac294276c073..96825c8003f4a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -35,7 +35,6 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.{Complete, Partial}
 import org.apache.spark.sql.catalyst.optimizer.{ConvertToLocalRelation, NestedColumnAliasingSuite}
 import org.apache.spark.sql.catalyst.plans.logical.{LocalLimit, Project, RepartitionByExpression, Sort}
 import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.{CommandResultExec, UnionExec}
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.execution.aggregate._
@@ -59,7 +58,7 @@ import org.apache.spark.util.ResetSystemProperties
 
 @ExtendedSQLTest
 class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper
-  with ResetSystemProperties with QueryErrorsSuiteBase {
+  with ResetSystemProperties {
   import testImplicits._
 
   setupTestData()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index 36fcd042e05ec..6116b1aa6794b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -21,7 +21,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
 import org.apache.spark.sql.catalyst.plans.logical.{Join, LogicalPlan, Project, Sort}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanHelper, DisableAdaptiveExecution}
 import org.apache.spark.sql.execution.datasources.FileScanRDD
@@ -32,7 +31,6 @@ import org.apache.spark.sql.test.SharedSparkSession
 
 class SubquerySuite extends QueryTest
   with SharedSparkSession
-  with QueryErrorsSuiteBase
   with AdaptiveSparkPlanHelper {
   import testImplicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
index 4a847ca0340ec..5b0b88c080856 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
@@ -23,8 +23,7 @@ import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2Provider, Inse
 class QueryCompilationErrorsDSv2Suite
   extends QueryTest
     with DatasourceV2SQLBase
-    with InsertIntoSQLOnlyTests
-    with QueryErrorsSuiteBase {
+    with InsertIntoSQLOnlyTests {
 
   private val v2Source = classOf[FakeV2Provider].getName
   override protected val v2Format = v2Source
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 4258a0296cf8c..f07451b4dd560 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -35,7 +35,6 @@ case class ArrayClass(arr: Seq[StringIntClass])
 
 class QueryCompilationErrorsSuite
   extends QueryTest
-  with QueryErrorsSuiteBase
   with SharedSparkSession {
   import testImplicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
deleted file mode 100644
index bf1c2bb812d6c..0000000000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.errors
-
-import org.apache.spark.QueryContext
-
-trait QueryErrorsSuiteBase {
-
-  case class ExpectedContext(
-      objectType: String,
-      objectName: String,
-      startIndex: Int,
-      stopIndex: Int,
-      fragment: String) extends QueryContext
-
-  object ExpectedContext {
-    def apply(fragment: String, start: Int, stop: Int): ExpectedContext = {
-      ExpectedContext("", "", start, stop, fragment)
-    }
-  }
-}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index a2728c4886ff4..fa812ecee4c6a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -25,8 +25,7 @@ import org.apache.spark.sql.types.ByteType
 
 // Test suite for all the execution errors that requires enable ANSI SQL mode.
 class QueryExecutionAnsiErrorsSuite extends QueryTest
-  with SharedSparkSession
-  with QueryErrorsSuiteBase {
+  with SharedSparkSession {
 
   override def sparkConf: SparkConf = super.sparkConf.set(SQLConf.ANSI_ENABLED.key, "true")
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index bb415ba6afc0e..e55ec53f927cc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -50,8 +50,7 @@
 class QueryExecutionErrorsSuite
   extends QueryTest
   with ParquetTest
   with OrcTest
-  with SharedSparkSession
-  with QueryErrorsSuiteBase {
+  with SharedSparkSession {
 
   import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 883b581d91b66..be438a37a6b70 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.test.SharedSparkSession
 
 // Turn of the length check because most of the tests check entire error messages
 // scalastyle:off line.size.limit
-class QueryParsingErrorsSuite extends QueryTest with QueryErrorsSuiteBase with SharedSparkSession {
+class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
 
   private def parseException(sqlText: String): SparkThrowable = {
     intercept[ParseException](sql(sqlText).collect())
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 0689dadac7ba9..ad1e64c69a70a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -25,7 +25,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.Project
 import org.apache.spark.sql.catalyst.trees.Origin
 import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.internal.SQLConf._
 import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
 
@@ -34,7 +33,7 @@ class SimpleSQLViewSuite extends SQLViewSuite with SharedSparkSession
 /**
  * A suite for testing view related functionality.
 */
-abstract class SQLViewSuite extends QueryTest with SQLTestUtils with QueryErrorsSuiteBase {
+abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
   import testImplicits._
 
   protected override def beforeAll(): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index eba9c92e3caf6..b5a294025a875 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -21,9 +21,8 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 
-class ShowPartitionsParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
+class ShowPartitionsParserSuite extends AnalysisTest {
   test("SHOW PARTITIONS") {
     val commandName = "SHOW PARTITIONS"
     Seq(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
index 4a38ed9e2b903..0b9ad9628ada3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
@@ -21,9 +21,8 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.{TruncatePartition, TruncateTable}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 
-class TruncateTableParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
+class TruncateTableParserSuite extends AnalysisTest {
   test("truncate table") {
     comparePlans(
       parsePlan("TRUNCATE TABLE a.b.c"),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
index 93d210a47f9d7..d6b2c8f48f5e5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.connector.catalog.TableCatalog
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.StringType
@@ -29,8 +28,7 @@ import org.apache.spark.util.Utils
 * The class contains tests for the `DESCRIBE TABLE` command to check V2 table catalogs.
 */
 class DescribeTableSuite extends command.DescribeTableSuiteBase
-  with CommandSuiteBase
-  with QueryErrorsSuiteBase {
+  with CommandSuiteBase {
 
   test("Describing a partition is not supported") {
     withNamespaceAndTable("ns", "table") { tbl =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 735d3f1580c3d..fa3241fe59bf3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryErrorsSuiteBase}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.execution.datasources.DataSourceUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.PartitionOverwriteMode
@@ -57,7 +57,7 @@ case class SimpleInsert(userSpecifiedSchema: StructType)(@transient val sparkSes
   }
 }
 
-class InsertSuite extends DataSourceTest with QueryErrorsSuiteBase with SharedSparkSession {
+class InsertSuite extends DataSourceTest with SharedSparkSession {
   import testImplicits._
 
   protected override lazy val sql = spark.sql _
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
index 5b0dc4adce9b7..50c08d8807447 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
@@ -21,7 +21,6 @@ import java.time.{Duration, Period}
 import java.time.temporal.ChronoUnit
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.datasources.parquet.ParquetTest
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
@@ -30,8 +29,7 @@ case class Cases(lower: String, UPPER: String)
 
 class HiveParquetSuite extends QueryTest
   with ParquetTest
-  with TestHiveSingleton
-  with QueryErrorsSuiteBase {
+  with TestHiveSingleton {
 
   test("Case insensitive attribute names") {
     withParquetTable((1 to 4).map(i => Cases(i.toString, i.toString)), "cases") {
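
For reference, a minimal sketch of how a suite reads after this change: `ExpectedContext` is now inherited from `SparkFunSuite` (via `QueryTest`), so the `QueryErrorsSuiteBase` mix-in is no longer needed. The suite below is a hypothetical example, not part of the patch; it assumes the `checkError` overloads in `SparkFunSuite` accept named `exception`/`errorClass`/`parameters`/`context` arguments (as the first hunk suggests) and that `DIVIDE_BY_ZERO` carries a `config` parameter in ANSI mode.

import org.apache.spark.SparkArithmeticException
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession

// Hypothetical suite: no QueryErrorsSuiteBase mix-in, ExpectedContext comes
// from SparkFunSuite through QueryTest.
class ExpectedContextExampleSuite extends QueryTest with SharedSparkSession {

  test("DIVIDE_BY_ZERO points at the offending SQL fragment") {
    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
      val e = intercept[SparkArithmeticException] {
        sql("SELECT 1/0").collect()
      }
      // The three-argument ExpectedContext.apply fills objectType/objectName
      // with empty strings; "1/0" spans characters 7..9 of the query text.
      // The exact parameter map depends on the error-class definition.
      checkError(
        exception = e,
        errorClass = "DIVIDE_BY_ZERO",
        parameters = Map("config" -> "\"spark.sql.ansi.enabled\""),
        context = ExpectedContext(fragment = "1/0", start = 7, stop = 9))
    }
  }
}

Because both the case class and its companion moved into `SparkFunSuite`, every suite that already extends it gets the helper for free, which is why the patch is otherwise a pure removal of the mix-in and its imports.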