diff --git a/sql/api/pom.xml b/sql/api/pom.xml
index 9b7917e03434..41a5b85d4c67 100644
--- a/sql/api/pom.xml
+++ b/sql/api/pom.xml
@@ -40,6 +40,11 @@
       <artifactId>spark-common-utils_${scala.binary.version}</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-unsafe_${scala.binary.version}</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
new file mode 100644
index 000000000000..02e8b12c707e
--- /dev/null
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.errors
+
+import org.apache.spark.{SparkArithmeticException, SparkException, SparkRuntimeException, SparkUnsupportedOperationException}
+import org.apache.spark.unsafe.types.UTF8String
+
+/**
+ * Object for grouping error messages raised while working with data types. These methods
+ * live in the `sql/api` module so that they can be thrown without a dependency on catalyst;
+ * `QueryExecutionErrors` now delegates its data-type-related errors to this object.
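+ *
+ * A typical call site (illustrative sketch; the actual one appears in the `DecimalType.scala`
+ * hunk later in this diff):
+ * {{{
+ *   if (precision > DecimalType.MAX_PRECISION) {
+ *     throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
+ *       precision, DecimalType.MAX_PRECISION)
+ *   }
+ * }}}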
+ */
+private[sql] object DataTypeErrors {
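+  // Generic legacy "unsupported operation" error, e.g. for AnyDataType.defaultConcreteType.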
+ def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
+ new SparkUnsupportedOperationException(
+ errorClass = "_LEGACY_ERROR_TEMP_2225",
+ messageParameters = Map.empty)
+ }
+
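+  // Raised when a decimal value needs more digits of precision than its type allows.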
+ def decimalPrecisionExceedsMaxPrecisionError(
+ precision: Int, maxPrecision: Int): SparkArithmeticException = {
+ new SparkArithmeticException(
+ errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
+ messageParameters = Map(
+ "precision" -> precision.toString,
+ "maxPrecision" -> maxPrecision.toString
+ ),
+ context = Array.empty,
+ summary = "")
+ }
+
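+  // Internal error for rounding modes other than FLOOR, CEILING, HALF_UP and HALF_EVEN.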
+ def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): SparkException = {
+ SparkException.internalError(s"Not supported rounding mode: ${roundMode.toString}.")
+ }
+
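+  // Raised when a parsed numeric string falls outside the supported decimal range.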
+ def outOfDecimalTypeRangeError(str: UTF8String): SparkArithmeticException = {
+ new SparkArithmeticException(
+ errorClass = "NUMERIC_OUT_OF_SUPPORTED_RANGE",
+ messageParameters = Map(
+ "value" -> str.toString),
+ context = Array.empty,
+ summary = "")
+ }
+
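+  // Raised when metadata handling encounters a Java value of an unsupported class.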
+ def unsupportedJavaTypeError(clazz: Class[_]): SparkRuntimeException = {
+ new SparkRuntimeException(
+ errorClass = "_LEGACY_ERROR_TEMP_2121",
+ messageParameters = Map("clazz" -> clazz.toString()))
+ }
+
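+  // Raised when a null literal cannot be cast to the named type, e.g. ObjectType.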
+ def nullLiteralsCannotBeCastedError(name: String): SparkUnsupportedOperationException = {
+ new SparkUnsupportedOperationException(
+ errorClass = "_LEGACY_ERROR_TEMP_2226",
+ messageParameters = Map(
+ "name" -> name))
+ }
+
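+  // Raised when a class registered as a UDT does not extend UserDefinedType.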
+ def notUserDefinedTypeError(name: String, userClass: String): Throwable = {
+ new SparkException(
+ errorClass = "_LEGACY_ERROR_TEMP_2227",
+ messageParameters = Map(
+ "name" -> name,
+ "userClass" -> userClass),
+ cause = null)
+ }
+
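+  // Raised when the UDT class registered for a user class cannot be loaded.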
+ def cannotLoadUserDefinedTypeError(name: String, userClass: String): Throwable = {
+ new SparkException(
+ errorClass = "_LEGACY_ERROR_TEMP_2228",
+ messageParameters = Map(
+ "name" -> name,
+ "userClass" -> userClass),
+ cause = null)
+ }
+
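+  // Raised when a metadata JSON array contains elements of an unsupported class.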
+ def unsupportedArrayTypeError(clazz: Class[_]): SparkRuntimeException = {
+ new SparkRuntimeException(
+ errorClass = "_LEGACY_ERROR_TEMP_2120",
+ messageParameters = Map("clazz" -> clazz.toString()))
+ }
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 59b66bd4343e..56c34356dc46 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -515,7 +515,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
}
def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): SparkException = {
- SparkException.internalError(s"Not supported rounding mode: ${roundMode.toString}.")
+ DataTypeErrors.unsupportedRoundingMode(roundMode)
}
def resolveCannotHandleNestedSchema(plan: LogicalPlan): SparkRuntimeException = {
@@ -1265,14 +1265,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
def decimalPrecisionExceedsMaxPrecisionError(
precision: Int, maxPrecision: Int): SparkArithmeticException = {
- new SparkArithmeticException(
- errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
- messageParameters = Map(
- "precision" -> precision.toString,
- "maxPrecision" -> maxPrecision.toString
- ),
- context = Array.empty,
- summary = "")
+ DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(precision, maxPrecision)
}
def outOfDecimalTypeRangeError(str: UTF8String): SparkArithmeticException = {
@@ -1285,15 +1278,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
}
def unsupportedArrayTypeError(clazz: Class[_]): SparkRuntimeException = {
- new SparkRuntimeException(
- errorClass = "_LEGACY_ERROR_TEMP_2120",
- messageParameters = Map("clazz" -> clazz.toString()))
+    DataTypeErrors.unsupportedArrayTypeError(clazz)
}
def unsupportedJavaTypeError(clazz: Class[_]): SparkRuntimeException = {
- new SparkRuntimeException(
- errorClass = "_LEGACY_ERROR_TEMP_2121",
- messageParameters = Map("clazz" -> clazz.toString()))
+ DataTypeErrors.unsupportedJavaTypeError(clazz)
}
def failedParsingStructTypeError(raw: String): SparkRuntimeException = {
@@ -2169,34 +2158,19 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
}
def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
- new SparkUnsupportedOperationException(
- errorClass = "_LEGACY_ERROR_TEMP_2225",
- messageParameters = Map.empty)
+ DataTypeErrors.unsupportedOperationExceptionError()
}
def nullLiteralsCannotBeCastedError(name: String): SparkUnsupportedOperationException = {
- new SparkUnsupportedOperationException(
- errorClass = "_LEGACY_ERROR_TEMP_2226",
- messageParameters = Map(
- "name" -> name))
+ DataTypeErrors.nullLiteralsCannotBeCastedError(name)
}
def notUserDefinedTypeError(name: String, userClass: String): Throwable = {
- new SparkException(
- errorClass = "_LEGACY_ERROR_TEMP_2227",
- messageParameters = Map(
- "name" -> name,
- "userClass" -> userClass),
- cause = null)
+ DataTypeErrors.notUserDefinedTypeError(name, userClass)
}
def cannotLoadUserDefinedTypeError(name: String, userClass: String): Throwable = {
- new SparkException(
- errorClass = "_LEGACY_ERROR_TEMP_2228",
- messageParameters = Map(
- "name" -> name,
- "userClass" -> userClass),
- cause = null)
+ DataTypeErrors.cannotLoadUserDefinedTypeError(name, userClass)
}
def notPublicClassError(name: String): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index c1483e719b52..67f634f8379c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.types
import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
/**
* A non-concrete data type, reserved for internal uses.
@@ -107,7 +107,7 @@ protected[sql] object AnyDataType extends AbstractDataType with Serializable {
// Note that since AnyDataType matches any concrete types, defaultConcreteType should never
// be invoked.
override private[sql] def defaultConcreteType: DataType =
- throw QueryExecutionErrors.unsupportedOperationExceptionError()
+ throw DataTypeErrors.unsupportedOperationExceptionError()
override private[sql] def simpleString: String = "any"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 2c0b6677541f..f1529285294e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -23,6 +23,7 @@ import scala.util.Try
import org.apache.spark.annotation.Unstable
import org.apache.spark.sql.catalyst.trees.SQLQueryContext
+import org.apache.spark.sql.errors.DataTypeErrors
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.unsafe.types.UTF8String
@@ -120,7 +121,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
DecimalType.checkNegativeScale(scale)
this.decimalVal = decimal.setScale(scale, ROUND_HALF_UP)
if (decimalVal.precision > precision) {
- throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
+ throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
decimalVal.precision, precision)
}
this.longVal = 0L
@@ -382,7 +383,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
case ROUND_FLOOR => if (lv < 0) -1L else 0L
case ROUND_CEILING => if (lv > 0) 1L else 0L
case ROUND_HALF_UP | ROUND_HALF_EVEN => 0L
- case _ => throw QueryExecutionErrors.unsupportedRoundingMode(roundMode)
+ case _ => throw DataTypeErrors.unsupportedRoundingMode(roundMode)
}
} else {
val pow10diff = POW_10(diff)
@@ -408,7 +409,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
lv += (if (droppedDigits < 0) -1L else 1L)
}
case _ =>
- throw QueryExecutionErrors.unsupportedRoundingMode(roundMode)
+ throw DataTypeErrors.unsupportedRoundingMode(roundMode)
}
}
} else if (scale > _scale) {
@@ -622,7 +623,7 @@ object Decimal {
// For example: Decimal("6.0790316E+25569151")
if (numDigitsInIntegralPart(bigDecimal) > DecimalType.MAX_PRECISION &&
!SQLConf.get.allowNegativeScaleOfDecimalEnabled) {
- throw QueryExecutionErrors.outOfDecimalTypeRangeError(str)
+ throw DataTypeErrors.outOfDecimalTypeRangeError(str)
} else {
Decimal(bigDecimal)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 9782f140335b..49ac217f1bd2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -22,7 +22,7 @@ import java.util.Locale
import scala.annotation.tailrec
import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.{DataTypeErrors, QueryCompilationErrors}
import org.apache.spark.sql.internal.SQLConf
/**
@@ -48,7 +48,7 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
}
if (precision > DecimalType.MAX_PRECISION) {
- throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
+ throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
precision, DecimalType.MAX_PRECISION)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
index 3e05eda3443d..4e7ac996d31e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
@@ -23,7 +23,7 @@ import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
/**
@@ -163,13 +163,13 @@ object Metadata {
builder.putMetadataArray(
key, value.asInstanceOf[List[JObject]].map(fromJObject).toArray)
case other =>
- throw QueryExecutionErrors.unsupportedArrayTypeError(other.getClass)
+ throw DataTypeErrors.unsupportedArrayTypeError(other.getClass)
}
}
case (key, JNull) =>
builder.putNull(key)
case (key, other) =>
- throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+ throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
}
builder.build()
}
@@ -196,7 +196,7 @@ object Metadata {
case x: Metadata =>
toJsonValue(x.map)
case other =>
- throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+ throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
}
}
@@ -223,7 +223,7 @@ object Metadata {
case null =>
0
case other =>
- throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+ throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
}
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
index 73a8a65c7094..855421678545 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
@@ -17,11 +17,11 @@
package org.apache.spark.sql.types
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
object ObjectType extends AbstractDataType {
override private[sql] def defaultConcreteType: DataType =
- throw QueryExecutionErrors.nullLiteralsCannotBeCastedError(ObjectType.simpleString)
+ throw DataTypeErrors.nullLiteralsCannotBeCastedError(ObjectType.simpleString)
override private[sql] def acceptsType(other: DataType): Boolean = other match {
case ObjectType(_) => true
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
index a6cd77b99c95..293687a4d61d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
@@ -21,7 +21,7 @@ import scala.collection.mutable
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.internal.Logging
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
import org.apache.spark.util.Utils
/**
@@ -78,10 +78,10 @@ object UDTRegistration extends Serializable with Logging {
if (classOf[UserDefinedType[_]].isAssignableFrom(udtClass)) {
udtClass
} else {
- throw QueryExecutionErrors.notUserDefinedTypeError(udtClass.getName, userClass)
+ throw DataTypeErrors.notUserDefinedTypeError(udtClass.getName, userClass)
}
} else {
- throw QueryExecutionErrors.cannotLoadUserDefinedTypeError(udtClassName, userClass)
+ throw DataTypeErrors.cannotLoadUserDefinedTypeError(udtClassName, userClass)
}
}
}