diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 700b1ed07513..9f68d4c5a53e 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -875,12 +875,6 @@
     ],
     "sqlState" : "42K01"
   },
-  "DATA_SOURCE_ALREADY_EXISTS" : {
-    "message" : [
-      "Data source '<provider>' already exists in the registry. Please use a different name for the new data source."
-    ],
-    "sqlState" : "42710"
-  },
   "DATA_SOURCE_NOT_EXIST" : {
     "message" : [
       "Data source '<provider>' not found. Please make sure the data source is registered."
@@ -1480,12 +1474,6 @@
     },
     "sqlState" : "42K0B"
   },
-  "INCORRECT_END_OFFSET" : {
-    "message" : [
-      "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but it's <endSeconds> now."
-    ],
-    "sqlState" : "22003"
-  },
   "INCORRECT_RAMP_UP_RATE" : {
     "message" : [
       "Max offset with <rowsPerSecond> rowsPerSecond is <maxSeconds>, but 'rampUpTimeSeconds' is <rampUpTimeSeconds>."
@@ -1906,11 +1894,6 @@
         "Operation <handle> not found."
       ]
     },
-    "SESSION_ALREADY_EXISTS" : {
-      "message" : [
-        "Session <handle> already exists."
-      ]
-    },
     "SESSION_CLOSED" : {
       "message" : [
         "Session <handle> was closed."
@@ -6065,11 +6048,6 @@
       "."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2142" : {
-    "message" : [
-      "Attributes for type <schema> is not supported."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2144" : {
     "message" : [
       "Unable to find constructor for <tpe>. This could happen if <tpe> is an interface, or a trait without companion object constructor."
@@ -6920,11 +6898,6 @@
       "<clazz>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3066" : {
-    "message" : [
-      "<msg>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3067" : {
     "message" : [
       "Streaming aggregation doesn't support group aggregate pandas UDF"
@@ -6980,11 +6953,6 @@
       "More than one event time columns are available. Please ensure there is at most one event time column per stream. event time columns: <eventTimeCols>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3078" : {
-    "message" : [
-      "Can not match ParquetTable in the query."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3079" : {
     "message" : [
       "Dynamic partition cannot be the parent of a static partition."
diff --git a/docs/sql-error-conditions-invalid-handle-error-class.md b/docs/sql-error-conditions-invalid-handle-error-class.md
index 14526cd53724..8df8e54a8d9d 100644
--- a/docs/sql-error-conditions-invalid-handle-error-class.md
+++ b/docs/sql-error-conditions-invalid-handle-error-class.md
@@ -41,10 +41,6 @@ Operation `<handle>` already exists.

 Operation `<handle>` not found.

-## SESSION_ALREADY_EXISTS
-
-Session `<handle>` already exists.
-
 ## SESSION_CLOSED

 Session `<handle>` was closed.
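Each entry above is a message template keyed by error class: code raises a `SparkThrowable` with an `errorClass` naming a JSON entry and a `messageParameters` map whose keys fill the `<placeholder>` slots in that entry's template, so an entry can only be deleted once its last caller is gone. A minimal sketch, mirroring the `dataSourceAlreadyExists` helper that this patch removes from `QueryCompilationErrors` below (the wrapping object is illustrative):

```scala
import org.apache.spark.sql.AnalysisException

object DataSourceErrors {
  // Mirrors the removed helper: "DATA_SOURCE_ALREADY_EXISTS" keys into
  // error-classes.json, and the "provider" entry fills the <provider> slot
  // in the registered message template.
  def dataSourceAlreadyExists(name: String): Throwable = {
    new AnalysisException(
      errorClass = "DATA_SOURCE_ALREADY_EXISTS",
      messageParameters = Map("provider" -> name))
  }
}
```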
diff --git a/docs/sql-error-conditions-sqlstates.md b/docs/sql-error-conditions-sqlstates.md
index 49cfb56b3662..85f1c5c69c33 100644
--- a/docs/sql-error-conditions-sqlstates.md
+++ b/docs/sql-error-conditions-sqlstates.md
@@ -71,7 +71,7 @@ Spark SQL uses the following `SQLSTATE` classes:
-ARITHMETIC_OVERFLOW, CAST_OVERFLOW, CAST_OVERFLOW_IN_TABLE_INSERT, DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION, INVALID_INDEX_OF_ZERO, INCORRECT_END_OFFSET, INCORRECT_RAMP_UP_RATE, INVALID_ARRAY_INDEX, INVALID_ARRAY_INDEX_IN_ELEMENT_AT, NUMERIC_OUT_OF_SUPPORTED_RANGE, NUMERIC_VALUE_OUT_OF_RANGE
+ARITHMETIC_OVERFLOW, CAST_OVERFLOW, CAST_OVERFLOW_IN_TABLE_INSERT, DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION, INVALID_INDEX_OF_ZERO, INCORRECT_RAMP_UP_RATE, INVALID_ARRAY_INDEX, INVALID_ARRAY_INDEX_IN_ELEMENT_AT, NUMERIC_OUT_OF_SUPPORTED_RANGE, NUMERIC_VALUE_OUT_OF_RANGE
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index a8d2b6c894bc..248839666ef2 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -474,12 +474,6 @@ For more details see [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-

 DataType `<type>` requires a length parameter, for example `<type>`(10). Please specify the length.

-### DATA_SOURCE_ALREADY_EXISTS
-
-[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
-
-Data source '`<provider>`' already exists in the registry. Please use a different name for the new data source.
-
 ### DATA_SOURCE_NOT_EXIST

 [SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
@@ -886,12 +880,6 @@ You may get a different result due to the upgrading to

 For more details see [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html)

-### INCORRECT_END_OFFSET
-
-[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
-
-Max offset with `<rowsPerSecond>` rowsPerSecond is `<maxSeconds>`, but it's `<endSeconds>` now.
-
 ### INCORRECT_RAMP_UP_RATE

 [SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
index b30f7b7a00e9..456a311efda2 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -27,7 +27,7 @@ import org.apache.spark.unsafe.types.UTF8String
 /**
  * Object for grouping error messages from (most) exceptions thrown during query execution.
  * This does not include exceptions thrown during the eager execution of commands, which are
- * grouped into [[QueryCompilationErrors]].
+ * grouped into [[CompilationErrors]].
  */
 private[sql] object DataTypeErrors extends DataTypeErrorsBase {
   def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index ee41cbe2f50e..e8235fd10466 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3856,12 +3856,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
       "reason" -> reason))
   }

-  def dataSourceAlreadyExists(name: String): Throwable = {
-    new AnalysisException(
-      errorClass = "DATA_SOURCE_ALREADY_EXISTS",
-      messageParameters = Map("provider" -> name))
-  }
-
   def dataSourceDoesNotExist(name: String): Throwable = {
     new AnalysisException(
       errorClass = "DATA_SOURCE_NOT_EXIST",
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 1ee20a98cfd1..ba01f9559161 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns
 import org.apache.spark.sql.connector.catalog.SupportsNamespaces.PROP_OWNER
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils

@@ -569,8 +570,10 @@ abstract class ExternalCatalogSuite extends SparkFunSuite {
       // then be caught and converted to a RuntimeException with a descriptive message.
       case ex: RuntimeException if ex.getMessage.contains("MetaException") =>
         throw new AnalysisException(
-          errorClass = "_LEGACY_ERROR_TEMP_3066",
-          messageParameters = Map("msg" -> ex.getMessage))
+          errorClass = "_LEGACY_ERROR_TEMP_2193",
+          messageParameters = Map(
+            "hiveMetastorePartitionPruningFallbackOnException" ->
+              SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION.key))
     }
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
index 583d7fd7ee3b..7fa34cfddbf0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala
@@ -160,7 +160,9 @@ class QueryExecutionSuite extends SharedSparkSession {

     // Throw an AnalysisException - this should be captured.
     spark.experimental.extraStrategies = Seq[SparkStrategy](
-      (_: LogicalPlan) => throw new AnalysisException("_LEGACY_ERROR_TEMP_3078", Map.empty))
+      (_: LogicalPlan) => throw new AnalysisException(
+        "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+        messageParameters = Map("dataSourceType" -> "XXX")))
     assert(qe.toString.contains("org.apache.spark.sql.AnalysisException"))

     // Throw an Error - this should not be captured.
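The test above now raises the registered `UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY` condition instead of a legacy placeholder. A sketch of how a suite could pin down both the error class and its parameters, assuming a test class that extends `SparkFunSuite` with `SharedSparkSession` and its `checkError` helper; the query is hypothetical:

```scala
test("direct query on unsupported data source raises a registered error") {
  val e = intercept[AnalysisException] {
    sql("SELECT * FROM someStreamingSource")  // hypothetical failing query
  }
  // checkError compares the machine-readable error class and its parameters,
  // not the rendered message text.
  checkError(
    exception = e,
    errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
    parameters = Map("dataSourceType" -> "XXX"))
}
```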
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
index da2705f7c72b..c27b71ac8278 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
@@ -2344,9 +2344,7 @@ class ParquetV2FilterSuite extends ParquetFilterSuite {

           checker(stripSparkFilter(query), expected)

-        case _ =>
-          throw new AnalysisException(
-            errorClass = "_LEGACY_ERROR_TEMP_3078", messageParameters = Map.empty)
+        case _ => assert(false, "Can not match ParquetTable in the query.")
       }
     }
   }
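In a test-only dead branch like this, failing the test directly is clearer than throwing an `AnalysisException` with a legacy error template. A minimal sketch, assuming ScalaTest's `Assertions` (which `SparkFunSuite` ultimately mixes in) and an illustrative method name:

```scala
import org.scalatest.Assertions.fail

// fail(...) throws TestFailedException unconditionally, which is the idiomatic
// ScalaTest spelling of assert(false, ...) in an unreachable match arm; it
// returns Nothing, so the match still type-checks.
def describeTable(table: Any): String = table match {
  case name: String => s"parquet table: $name"
  case _ => fail("Can not match ParquetTable in the query.")
}
```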