core/src/test/scala/org/apache/spark/SparkFunSuite.scala (13 additions, 0 deletions)
@@ -389,6 +389,19 @@ abstract class SparkFunSuite
       context: QueryContext): Unit =
     checkError(exception, errorClass, Some(errorSubClass), None, parameters, false, Array(context))
 
+  case class ExpectedContext(
+      objectType: String,
+      objectName: String,
+      startIndex: Int,
+      stopIndex: Int,
+      fragment: String) extends QueryContext
+
+  object ExpectedContext {
+    def apply(fragment: String, start: Int, stop: Int): ExpectedContext = {
+      ExpectedContext("", "", start, stop, fragment)
+    }
+  }
+
   class LogAppender(msg: String = "", maxEvents: Int = 1000)
       extends AbstractAppender("logAppender", null, null, true, Property.EMPTY_ARRAY) {
     private val _loggingEvents = new ArrayBuffer[LogEvent]()
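Below is a minimal usage sketch for the relocated ExpectedContext together with SparkFunSuite.checkError. It is illustrative only: the error class, parameters, SQL fragment, and character offsets are assumptions, not taken from this diff, and it presumes ANSI mode is enabled so the division raises an error.

  // Illustrative sketch: assert both the error class and where the error
  // points in the query text ("6/0" occupies offsets 7..9 of the statement).
  checkError(
    exception = intercept[SparkArithmeticException](sql("SELECT 6/0").collect()),
    errorClass = "DIVIDE_BY_ZERO",
    parameters = Map("config" -> "\"spark.sql.ansi.enabled\""),
    context = ExpectedContext(fragment = "6/0", start = 7, stop = 9))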
@@ -23,7 +23,6 @@ import org.apache.spark.TestUtils.{assertNotSpilled, assertSpilled}
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression}
 import org.apache.spark.sql.catalyst.optimizer.TransposeWindow
 import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.execution.exchange.{ENSURE_REQUIREMENTS, Exchange, ShuffleExchangeExec}
 import org.apache.spark.sql.execution.window.WindowExec
@@ -38,7 +37,6 @@ import org.apache.spark.sql.types._
  */
 class DataFrameWindowFunctionsSuite extends QueryTest
   with SharedSparkSession
-  with QueryErrorsSuiteBase
   with AdaptiveSparkPlanHelper {
 
   import testImplicits._
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.functions.{length, struct, sum}
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
@@ -26,7 +25,6 @@ import org.apache.spark.sql.types._
  * Comprehensive tests for Dataset.unpivot.
  */
 class DatasetUnpivotSuite extends QueryTest
-  with QueryErrorsSuiteBase
   with SharedSparkSession {
   import testImplicits._
 
@@ -20,15 +20,14 @@ package org.apache.spark.sql
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.catalyst.expressions.Hex
 import org.apache.spark.sql.connector.catalog.InMemoryPartitionTableCatalog
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
 import org.apache.spark.unsafe.types.UTF8String
 
 /**
  * The base trait for SQL INSERT.
  */
-trait SQLInsertTestSuite extends QueryTest with SQLTestUtils with QueryErrorsSuiteBase {
+trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
 
   import testImplicits._
 
@@ -35,7 +35,6 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.{Complete, Partial}
 import org.apache.spark.sql.catalyst.optimizer.{ConvertToLocalRelation, NestedColumnAliasingSuite}
 import org.apache.spark.sql.catalyst.plans.logical.{LocalLimit, Project, RepartitionByExpression, Sort}
 import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.{CommandResultExec, UnionExec}
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
 import org.apache.spark.sql.execution.aggregate._
@@ -59,7 +58,7 @@ import org.apache.spark.util.ResetSystemProperties
 
 @ExtendedSQLTest
 class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper
-  with ResetSystemProperties with QueryErrorsSuiteBase {
+  with ResetSystemProperties {
   import testImplicits._
 
   setupTestData()
@@ -21,7 +21,6 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
 import org.apache.spark.sql.catalyst.plans.logical.{Join, LogicalPlan, Project, Sort}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanHelper, DisableAdaptiveExecution}
 import org.apache.spark.sql.execution.datasources.FileScanRDD
@@ -32,7 +31,6 @@ import org.apache.spark.sql.test.SharedSparkSession
 
 class SubquerySuite extends QueryTest
   with SharedSparkSession
-  with QueryErrorsSuiteBase
   with AdaptiveSparkPlanHelper {
   import testImplicits._
 
@@ -23,8 +23,7 @@ import org.apache.spark.sql.connector.{DatasourceV2SQLBase, FakeV2Provider, Inse
 class QueryCompilationErrorsDSv2Suite
   extends QueryTest
   with DatasourceV2SQLBase
-  with InsertIntoSQLOnlyTests
-  with QueryErrorsSuiteBase {
+  with InsertIntoSQLOnlyTests {
 
   private val v2Source = classOf[FakeV2Provider].getName
   override protected val v2Format = v2Source
@@ -35,7 +35,6 @@ case class ArrayClass(arr: Seq[StringIntClass])
 
 class QueryCompilationErrorsSuite
   extends QueryTest
-  with QueryErrorsSuiteBase
   with SharedSparkSession {
   import testImplicits._
 

This file was deleted.

@@ -25,8 +25,7 @@ import org.apache.spark.sql.types.ByteType
 
 // Test suite for all the execution errors that requires enable ANSI SQL mode.
 class QueryExecutionAnsiErrorsSuite extends QueryTest
-  with SharedSparkSession
-  with QueryErrorsSuiteBase {
+  with SharedSparkSession {
 
   override def sparkConf: SparkConf = super.sparkConf.set(SQLConf.ANSI_ENABLED.key, "true")
 
@@ -50,8 +50,7 @@ class QueryExecutionErrorsSuite
   extends QueryTest
   with ParquetTest
   with OrcTest
-  with SharedSparkSession
-  with QueryErrorsSuiteBase {
+  with SharedSparkSession {
 
   import testImplicits._
 
@@ -24,7 +24,7 @@ import org.apache.spark.sql.test.SharedSparkSession
 
 // Turn of the length check because most of the tests check entire error messages
 // scalastyle:off line.size.limit
-class QueryParsingErrorsSuite extends QueryTest with QueryErrorsSuiteBase with SharedSparkSession {
+class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
 
   private def parseException(sqlText: String): SparkThrowable = {
     intercept[ParseException](sql(sqlText).collect())
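The parseException helper above combines naturally with checkError and the relocated ExpectedContext to pin a parser error to its position in the input. A hypothetical sketch (the error class and parameters are assumptions, not taken from this diff):

  // Hypothetical sketch: error class, parameters, and offsets are
  // illustrative; ExpectedContext pins the error to "NATURAL CROSS JOIN u".
  checkError(
    exception = parseException("SELECT 1 FROM t NATURAL CROSS JOIN u"),
    errorClass = "INCOMPATIBLE_JOIN_TYPES",
    parameters = Map("joinType1" -> "NATURAL", "joinType2" -> "CROSS"),
    context = ExpectedContext(fragment = "NATURAL CROSS JOIN u", start = 16, stop = 35))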
@@ -25,7 +25,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.Project
 import org.apache.spark.sql.catalyst.trees.Origin
 import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.internal.SQLConf._
 import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
 
@@ -34,7 +33,7 @@ class SimpleSQLViewSuite extends SQLViewSuite with SharedSparkSession
 /**
  * A suite for testing view related functionality.
  */
-abstract class SQLViewSuite extends QueryTest with SQLTestUtils with QueryErrorsSuiteBase {
+abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
   import testImplicits._
 
   protected override def beforeAll(): Unit = {
@@ -21,9 +21,8 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 
-class ShowPartitionsParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
+class ShowPartitionsParserSuite extends AnalysisTest {
   test("SHOW PARTITIONS") {
     val commandName = "SHOW PARTITIONS"
     Seq(
@@ -21,9 +21,8 @@ import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedPartition
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.{TruncatePartition, TruncateTable}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 
-class TruncateTableParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
+class TruncateTableParserSuite extends AnalysisTest {
   test("truncate table") {
     comparePlans(
       parsePlan("TRUNCATE TABLE a.b.c"),
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.connector.catalog.TableCatalog
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.StringType
@@ -29,8 +28,7 @@ import org.apache.spark.util.Utils
  * The class contains tests for the `DESCRIBE TABLE` command to check V2 table catalogs.
  */
 class DescribeTableSuite extends command.DescribeTableSuiteBase
-  with CommandSuiteBase
-  with QueryErrorsSuiteBase {
+  with CommandSuiteBase {
 
   test("Describing a partition is not supported") {
     withNamespaceAndTable("ns", "table") { tbl =>
@@ -28,7 +28,7 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryErrorsSuiteBase}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.execution.datasources.DataSourceUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.PartitionOverwriteMode
@@ -57,7 +57,7 @@ case class SimpleInsert(userSpecifiedSchema: StructType)(@transient val sparkSes
   }
 }
 
-class InsertSuite extends DataSourceTest with QueryErrorsSuiteBase with SharedSparkSession {
+class InsertSuite extends DataSourceTest with SharedSparkSession {
   import testImplicits._
 
   protected override lazy val sql = spark.sql _
@@ -21,7 +21,6 @@ import java.time.{Duration, Period}
 import java.time.temporal.ChronoUnit
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
-import org.apache.spark.sql.errors.QueryErrorsSuiteBase
 import org.apache.spark.sql.execution.datasources.parquet.ParquetTest
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
@@ -30,8 +29,7 @@ case class Cases(lower: String, UPPER: String)
 
 class HiveParquetSuite extends QueryTest
   with ParquetTest
-  with TestHiveSingleton
-  with QueryErrorsSuiteBase {
+  with TestHiveSingleton {
 
   test("Case insensitive attribute names") {
     withParquetTable((1 to 4).map(i => Cases(i.toString, i.toString)), "cases") {