diff --git a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java index fc8689354274f..ecbb0ccb4d200 100644 --- a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java +++ b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java @@ -19,6 +19,7 @@ import java.io.IOException; +import org.apache.spark.errors.SparkCoreErrors; import org.apache.spark.unsafe.array.LongArray; import org.apache.spark.unsafe.memory.MemoryBlock; @@ -153,9 +154,6 @@ private void throwOom(final MemoryBlock page, final long required) { taskMemoryManager.freePage(page, this); } taskMemoryManager.showMemoryUsage(); - // checkstyle.off: RegexpSinglelineJava - throw new SparkOutOfMemoryError("UNABLE_TO_ACQUIRE_MEMORY", - new String[]{Long.toString(required), Long.toString(got)}); - // checkstyle.on: RegexpSinglelineJava + throw SparkCoreErrors.outOfMemoryError(required, got); } } diff --git a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java index 3ff3094456ef1..cfbf2e574787d 100644 --- a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java +++ b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java @@ -20,6 +20,8 @@ import org.apache.spark.SparkThrowableHelper; import org.apache.spark.annotation.Private; +import java.util.Map; + /** * This exception is thrown when a task can not acquire memory from the Memory manager. * Instead of throwing {@link OutOfMemoryError}, which kills the executor, @@ -28,7 +30,7 @@ @Private public final class SparkOutOfMemoryError extends OutOfMemoryError implements SparkThrowable { String errorClass; - String[] messageParameters; + Map messageParameters; public SparkOutOfMemoryError(String s) { super(s); @@ -38,7 +40,7 @@ public SparkOutOfMemoryError(OutOfMemoryError e) { super(e.getMessage()); } - public SparkOutOfMemoryError(String errorClass, String[] messageParameters) { + public SparkOutOfMemoryError(String errorClass, Map messageParameters) { super(SparkThrowableHelper.getMessage(errorClass, null, messageParameters)); this.errorClass = errorClass; this.messageParameters = messageParameters; @@ -46,7 +48,7 @@ public SparkOutOfMemoryError(String errorClass, String[] messageParameters) { @Override public String[] getMessageParameters() { - return messageParameters; + return SparkThrowableHelper.getMessageParameters(errorClass, null, messageParameters); } @Override diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 29f1f4f0b3007..24e3e4361f4ae 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -484,7 +484,7 @@ }, "UNRESOLVED_MAP_KEY" : { "message" : [ - "Cannot resolve column as a map key. If the key is a string literal, please add single quotes around it." + "Cannot resolve column as a map key. If the key is a string literal, please add single quotes around it." 
], "subClass" : { "WITHOUT_SUGGESTION" : { diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala index aea796685ee4b..dba6ef9347ff0 100644 --- a/core/src/main/scala/org/apache/spark/SparkException.scala +++ b/core/src/main/scala/org/apache/spark/SparkException.scala @@ -17,7 +17,7 @@ package org.apache.spark -import java.io.{FileNotFoundException, IOException} +import java.io.FileNotFoundException import java.sql.{SQLException, SQLFeatureNotSupportedException} import java.time.DateTimeException import java.util.ConcurrentModificationException @@ -29,7 +29,7 @@ class SparkException( cause: Throwable, errorClass: Option[String], errorSubClass: Option[String], - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext] = Array.empty) extends Exception(message, cause) with SparkThrowable { @@ -37,23 +37,24 @@ class SparkException( message: String, cause: Throwable, errorClass: Option[String], - messageParameters: Array[String]) = - this(message = message, - cause = cause, - errorClass = errorClass, - errorSubClass = None, - messageParameters = messageParameters) + messageParameters: Map[String, String]) = + this( + message = message, + cause = cause, + errorClass = errorClass, + errorSubClass = None, + messageParameters = messageParameters) def this(message: String, cause: Throwable) = this(message = message, cause = cause, errorClass = None, errorSubClass = None, - messageParameters = Array.empty) + messageParameters = Map.empty) def this(message: String) = this(message = message, cause = null) def this( errorClass: String, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable, context: Array[QueryContext], summary: String) = @@ -65,7 +66,7 @@ class SparkException( messageParameters = messageParameters, context) - def this(errorClass: String, messageParameters: Array[String], cause: Throwable) = + def this(errorClass: String, messageParameters: Map[String, String], cause: Throwable) = this( message = SparkThrowableHelper.getMessage(errorClass, null, messageParameters), cause = cause, @@ -76,7 +77,7 @@ class SparkException( def this( errorClass: String, errorSubClass: String, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable) = this( message = SparkThrowableHelper.getMessage(errorClass, errorSubClass, messageParameters), @@ -85,7 +86,12 @@ class SparkException( errorSubClass = Some(errorSubClass), messageParameters = messageParameters) - override def getMessageParameters: Array[String] = messageParameters + override def getMessageParameters: Array[String] = { + errorClass.map { ec => + SparkThrowableHelper.getMessageParameters(ec, errorSubClass.orNull, messageParameters) + }.getOrElse(Array.empty) + } + override def getErrorClass: String = errorClass.orNull override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context @@ -93,11 +99,17 @@ class SparkException( object SparkException { def internalError(msg: String): SparkException = { - new SparkException(errorClass = "INTERNAL_ERROR", messageParameters = Array(msg), cause = null) + new SparkException( + errorClass = "INTERNAL_ERROR", + messageParameters = Map("message" -> msg), + cause = null) } def internalError(msg: String, cause: Throwable): SparkException = { - new SparkException(errorClass = "INTERNAL_ERROR", messageParameters = Array(msg), cause = cause) + new 
SparkException( + errorClass = "INTERNAL_ERROR", + messageParameters = Map("message" -> msg), + cause = cause) } } @@ -128,13 +140,16 @@ private[spark] case class ExecutorDeadException(message: String) private[spark] class SparkUpgradeException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable) - extends RuntimeException(SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, - messageParameters), cause) - with SparkThrowable { + extends RuntimeException( + SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause) + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull} @@ -144,14 +159,17 @@ private[spark] class SparkUpgradeException( private[spark] class SparkArithmeticException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext], summary: String) extends ArithmeticException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context @@ -163,21 +181,24 @@ private[spark] class SparkArithmeticException( private[spark] class SparkUnsupportedOperationException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends UnsupportedOperationException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { def this( errorClass: String, errorSubClass: String, - messageParameters: Array[String]) = + messageParameters: Map[String, String]) = this( errorClass = errorClass, errorSubClass = Some(errorSubClass), messageParameters = messageParameters) - override def getMessageParameters: Array[String] = messageParameters + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } + override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull } @@ -188,13 +209,16 @@ private[spark] class SparkUnsupportedOperationException( private[spark] class SparkClassNotFoundException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable = null) extends ClassNotFoundException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def 
getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull} @@ -204,13 +228,16 @@ private[spark] class SparkClassNotFoundException( private[spark] class SparkConcurrentModificationException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable = null) extends ConcurrentModificationException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull} @@ -220,14 +247,17 @@ private[spark] class SparkConcurrentModificationException( private[spark] class SparkDateTimeException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext], summary: String) extends DateTimeException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context @@ -239,12 +269,15 @@ private[spark] class SparkDateTimeException( private[spark] class SparkFileAlreadyExistsException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends FileAlreadyExistsException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull} @@ -254,12 +287,15 @@ private[spark] class SparkFileAlreadyExistsException( private[spark] class SparkFileNotFoundException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends FileNotFoundException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull} @@ -269,102 +305,84 @@ private[spark] class SparkFileNotFoundException( private[spark] class SparkNumberFormatException( errorClass: String, errorSubClass: 
Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext], summary: String) extends NumberFormatException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context } -/** - * No such method exception thrown from Spark with an error class. - */ -private[spark] class SparkNoSuchMethodException( - errorClass: String, - errorSubClass: Option[String] = None, - messageParameters: Array[String]) - extends NoSuchMethodException( - SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { - - override def getMessageParameters: Array[String] = messageParameters - override def getErrorClass: String = errorClass - override def getErrorSubClass: String = errorSubClass.orNull} - /** * Illegal argument exception thrown from Spark with an error class. */ private[spark] class SparkIllegalArgumentException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext] = Array.empty, summary: String = "") extends IllegalArgumentException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { - - override def getMessageParameters: Array[String] = messageParameters - override def getErrorClass: String = errorClass - override def getErrorSubClass: String = errorSubClass.orNull - override def getQueryContext: Array[QueryContext] = context -} + with SparkThrowable { -/** - * IO exception thrown from Spark with an error class. 
- */ -private[spark] class SparkIOException( - errorClass: String, - errorSubClass: Option[String] = None, - messageParameters: Array[String]) - extends IOException( - SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull + override def getQueryContext: Array[QueryContext] = context } private[spark] class SparkRuntimeException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], cause: Throwable = null, context: Array[QueryContext] = Array.empty, summary: String = "") extends RuntimeException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary), cause) - with SparkThrowable { - - def this(errorClass: String, - errorSubClass: String, - messageParameters: Array[String], - cause: Throwable, - context: Array[QueryContext]) - = this(errorClass = errorClass, - errorSubClass = Some(errorSubClass), - messageParameters = messageParameters, - cause = cause, - context = context) - - def this(errorClass: String, - errorSubClass: String, - messageParameters: Array[String]) - = this(errorClass = errorClass, - errorSubClass = Some(errorSubClass), - messageParameters = messageParameters, - cause = null, - context = Array.empty[QueryContext]) - - override def getMessageParameters: Array[String] = messageParameters + with SparkThrowable { + + def this( + errorClass: String, + errorSubClass: String, + messageParameters: Map[String, String], + cause: Throwable, + context: Array[QueryContext]) + = this( + errorClass = errorClass, + errorSubClass = Some(errorSubClass), + messageParameters = messageParameters, + cause = cause, + context = context) + + def this( + errorClass: String, + errorSubClass: String, + messageParameters: Map[String, String]) + = this( + errorClass = errorClass, + errorSubClass = Some(errorSubClass), + messageParameters = messageParameters, + cause = null, + context = Array.empty[QueryContext]) + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } + override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context @@ -376,12 +394,15 @@ private[spark] class SparkRuntimeException( private[spark] class SparkSecurityException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends SecurityException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull } @@ -392,14 +413,17 @@ private[spark] class SparkSecurityException( private[spark] class SparkArrayIndexOutOfBoundsException( 
errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext], summary: String) extends ArrayIndexOutOfBoundsException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { + with SparkThrowable { + + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context @@ -410,40 +434,24 @@ private[spark] class SparkArrayIndexOutOfBoundsException( */ private[spark] class SparkSQLException( errorClass: String, - errorSubClass: Option[String] = None, - messageParameters: Array[String]) + errorSubClass: Option[String], + messageParameters: Map[String, String]) extends SQLException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { - def this(errorClass: String, messageParameters: Array[String]) = + def this(errorClass: String, messageParameters: Map[String, String]) = this( errorClass = errorClass, errorSubClass = None, messageParameters = messageParameters) - override def getMessageParameters: Array[String] = messageParameters - override def getErrorClass: String = errorClass - override def getErrorSubClass: String = errorSubClass.orNull -} - -/** - * No such element exception thrown from Spark with an error class. - */ -private[spark] class SparkNoSuchElementException( - errorClass: String, - errorSubClass: Option[String] = None, - messageParameters: Array[String], - context: Array[QueryContext], - summary: String) - extends NoSuchElementException( - SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary)) - with SparkThrowable { + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } - override def getMessageParameters: Array[String] = messageParameters override def getErrorClass: String = errorClass override def getErrorSubClass: String = errorSubClass.orNull - override def getQueryContext: Array[QueryContext] = context } /** @@ -452,19 +460,24 @@ private[spark] class SparkNoSuchElementException( private[spark] class SparkSQLFeatureNotSupportedException( errorClass: String, errorSubClass: Option[String] = None, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends SQLFeatureNotSupportedException( SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters)) - with SparkThrowable { + with SparkThrowable { - def this(errorClass: String, - errorSubClass: String, - messageParameters: Array[String]) = - this(errorClass = errorClass, + def this( + errorClass: String, + errorSubClass: String, + messageParameters: Map[String, String]) = + this( + errorClass = errorClass, errorSubClass = Some(errorSubClass), messageParameters = messageParameters) - override def getMessageParameters: Array[String] = messageParameters + override def getMessageParameters: Array[String] = { + SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters) + } + override def getErrorClass: String = errorClass override def 
getErrorSubClass: String = errorSubClass.orNull } diff --git a/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala b/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala index 92128f2b48b87..93e88261cb672 100644 --- a/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala +++ b/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala @@ -19,12 +19,14 @@ package org.apache.spark import java.net.URL +import scala.collection.JavaConverters._ import scala.collection.immutable.SortedMap import com.fasterxml.jackson.annotation.JsonIgnore import com.fasterxml.jackson.core.`type`.TypeReference import com.fasterxml.jackson.databind.json.JsonMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule +import org.apache.commons.text.StringSubstitutor import org.apache.spark.util.JsonProtocol.toJsonString import org.apache.spark.util.Utils @@ -76,22 +78,66 @@ private[spark] object SparkThrowableHelper { mapper.readValue(errorClassesUrl, new TypeReference[SortedMap[String, ErrorInfo]]() {}) } + def getParameterNames(errorClass: String, errorSubCLass: String): Array[String] = { + val errorInfo = errorClassToInfoMap.getOrElse(errorClass, + throw new IllegalArgumentException(s"Cannot find error class '$errorClass'")) + if (errorInfo.subClass.isEmpty && errorSubCLass != null) { + throw new IllegalArgumentException(s"'$errorClass' has no subclass") + } + if (errorInfo.subClass.isDefined && errorSubCLass == null) { + throw new IllegalArgumentException(s"'$errorClass' requires subclass") + } + var parameterizedMessage = errorInfo.messageFormat + if (errorInfo.subClass.isDefined) { + val givenSubClass = errorSubCLass + val errorSubInfo = errorInfo.subClass.get.getOrElse(givenSubClass, + throw new IllegalArgumentException(s"Cannot find sub error class '$givenSubClass'")) + parameterizedMessage = parameterizedMessage + errorSubInfo.messageFormat + } + val pattern = "<[a-zA-Z0-9_-]+>".r + val matches = pattern.findAllIn(parameterizedMessage) + val parameterSeq = matches.toArray + val parameterNames = parameterSeq.map(p => p.stripPrefix("<").stripSuffix(">")) + parameterNames + } + + def getMessageParameters( + errorClass: String, + errorSubCLass: String, + params: Map[String, String]): Array[String] = { + getParameterNames(errorClass, errorSubCLass).map(params.getOrElse(_, "?")) + } + + def getMessageParameters( + errorClass: String, + errorSubCLass: String, + params: java.util.Map[String, String]): Array[String] = { + getParameterNames(errorClass, errorSubCLass).map(params.getOrDefault(_, "?")) + } + def getMessage( errorClass: String, errorSubClass: String, - messageParameters: Array[String]): String = { + messageParameters: Map[String, String]): String = { getMessage(errorClass, errorSubClass, messageParameters, "") } def getMessage( errorClass: String, errorSubClass: String, - messageParameters: Array[String], + messageParameters: java.util.Map[String, String]): String = { + getMessage(errorClass, errorSubClass, messageParameters.asScala.toMap, "") + } + + def getMessage( + errorClass: String, + errorSubClass: String, + messageParameters: Map[String, String], context: String): String = { val errorInfo = errorClassToInfoMap.getOrElse(errorClass, throw new IllegalArgumentException(s"Cannot find error class '$errorClass'")) - val (displayClass, displayMessageParameters, displayFormat) = if (errorInfo.subClass.isEmpty) { - (errorClass, messageParameters, errorInfo.messageFormat) + val (displayClass, displayFormat) = if (errorInfo.subClass.isEmpty) { + (errorClass, 
errorInfo.messageFormat) } else { val subClasses = errorInfo.subClass.get if (errorSubClass == null) { @@ -99,40 +145,22 @@ private[spark] object SparkThrowableHelper { } val errorSubInfo = subClasses.getOrElse(errorSubClass, throw new IllegalArgumentException(s"Cannot find sub error class '$errorSubClass'")) - (errorClass + "." + errorSubClass, messageParameters, + (errorClass + "." + errorSubClass, errorInfo.messageFormat + " " + errorSubInfo.messageFormat) } - val displayMessage = String.format( - displayFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"), - displayMessageParameters : _*) + val sub = new StringSubstitutor(messageParameters.asJava) + sub.setEnableUndefinedVariableException(true) + val displayMessage = try { + sub.replace(displayFormat.replaceAll("<([a-zA-Z0-9_-]+)>", "\\$\\{$1\\}")) + } catch { + case _: IllegalArgumentException => throw SparkException.internalError( + s"Undefined an error message parameter: $messageParameters") + } val displayQueryContext = (if (context.isEmpty) "" else "\n") + context s"[$displayClass] $displayMessage$displayQueryContext" } - def getParameterNames(errorClass: String, errorSubCLass: String): Array[String] = { - val errorInfo = errorClassToInfoMap.getOrElse(errorClass, - throw new IllegalArgumentException(s"Cannot find error class '$errorClass'")) - if (errorInfo.subClass.isEmpty && errorSubCLass != null) { - throw new IllegalArgumentException(s"'$errorClass' has no subclass") - } - if (errorInfo.subClass.isDefined && errorSubCLass == null) { - throw new IllegalArgumentException(s"'$errorClass' requires subclass") - } - var parameterizedMessage = errorInfo.messageFormat - if (errorInfo.subClass.isDefined) { - val givenSubClass = errorSubCLass - val errorSubInfo = errorInfo.subClass.get.getOrElse(givenSubClass, - throw new IllegalArgumentException(s"Cannot find sub error class '$givenSubClass'")) - parameterizedMessage = parameterizedMessage + errorSubInfo.messageFormat - } - val pattern = "<[a-zA-Z0-9_-]+>".r - val matches = pattern.findAllIn(parameterizedMessage) - val parameterSeq = matches.toArray - val parameterNames = parameterSeq.map(p => p.stripPrefix("<").stripSuffix(">")) - parameterNames - } - def getSqlState(errorClass: String): String = { Option(errorClass).flatMap(errorClassToInfoMap.get).flatMap(_.sqlState).orNull } diff --git a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala index 1da0288446233..308ee003d5c2c 100644 --- a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala +++ b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala @@ -20,9 +20,12 @@ package org.apache.spark.errors import java.io.IOException import java.util.concurrent.TimeoutException +import scala.collection.JavaConverters._ + import org.apache.hadoop.fs.Path import org.apache.spark.{SparkException, TaskNotSerializableException} +import org.apache.spark.memory.SparkOutOfMemoryError import org.apache.spark.scheduler.{BarrierJobRunWithDynamicAllocationException, BarrierJobSlotsNumberCheckFailed, BarrierJobUnsupportedRDDChainException} import org.apache.spark.shuffle.{FetchFailedException, ShuffleManager} import org.apache.spark.storage.{BlockId, BlockManagerId, BlockNotFoundException, BlockSavedOnDecommissionedBlockManagerException, RDDBlockId, UnrecognizedBlockId} @@ -317,12 +320,24 @@ private[spark] object SparkCoreErrors { } def graphiteSinkInvalidProtocolError(invalidProtocol: String): Throwable = { - new SparkException(errorClass = 
"GRAPHITE_SINK_INVALID_PROTOCOL", - messageParameters = Array(invalidProtocol), cause = null) + new SparkException( + errorClass = "GRAPHITE_SINK_INVALID_PROTOCOL", + messageParameters = Map("protocol" -> invalidProtocol), + cause = null) } def graphiteSinkPropertyMissingError(missingProperty: String): Throwable = { - new SparkException(errorClass = "GRAPHITE_SINK_PROPERTY_MISSING", - messageParameters = Array(missingProperty), cause = null) + new SparkException( + errorClass = "GRAPHITE_SINK_PROPERTY_MISSING", + messageParameters = Map("property" -> missingProperty), + cause = null) + } + + def outOfMemoryError(requestedBytes: Long, receivedBytes: Long): OutOfMemoryError = { + new SparkOutOfMemoryError( + "UNABLE_TO_ACQUIRE_MEMORY", + Map( + "requestedBytes" -> requestedBytes.toString, + "receivedBytes" -> receivedBytes.toString).asJava) } } diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala index dda86387f156f..cbf273dc5c857 100644 --- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala @@ -20,7 +20,6 @@ package org.apache.spark import java.io.File import java.nio.charset.StandardCharsets import java.nio.file.Files -import java.util.IllegalFormatException import com.fasterxml.jackson.annotation.JsonInclude.Include import com.fasterxml.jackson.core.JsonParser.Feature.STRICT_DUPLICATE_DETECTION @@ -146,24 +145,26 @@ class SparkThrowableSuite extends SparkFunSuite { test("Check if error class is missing") { val ex1 = intercept[IllegalArgumentException] { - getMessage("", null, Array.empty) + getMessage("", null, Map.empty[String, String]) } assert(ex1.getMessage == "Cannot find error class ''") val ex2 = intercept[IllegalArgumentException] { - getMessage("LOREM_IPSUM", null, Array.empty) + getMessage("LOREM_IPSUM", null, Map.empty[String, String]) } assert(ex2.getMessage == "Cannot find error class 'LOREM_IPSUM'") } test("Check if message parameters match message format") { // Requires 2 args - intercept[IllegalFormatException] { - getMessage("UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", Array.empty) + val e = intercept[SparkException] { + getMessage("UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", Map.empty[String, String]) } + assert(e.getErrorClass === "INTERNAL_ERROR") + assert(e.getMessageParameters.head.contains("Undefined an error message parameter")) // Does not fail with too many args (expects 0 args) - assert(getMessage("DIVIDE_BY_ZERO", null, Array("foo", "bar", "baz")) == + assert(getMessage("DIVIDE_BY_ZERO", null, Map("config" -> "foo", "a" -> "bar")) == "[DIVIDE_BY_ZERO] Division by zero. " + "Use `try_divide` to tolerate divisor being 0 and return NULL instead. " + "If necessary set foo to \"false\" " + @@ -175,7 +176,20 @@ class SparkThrowableSuite extends SparkFunSuite { getMessage( "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`foo`", "`bar`, `baz`") + Map("objectName" -> "`foo`", "proposal" -> "`bar`, `baz`") + ) == + "[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column or function parameter with " + + "name `foo` cannot be resolved. Did you mean one of the following? [`bar`, `baz`]" + ) + + assert( + getMessage( + "UNRESOLVED_COLUMN", + "WITH_SUGGESTION", + Map( + "objectName" -> "`foo`", + "proposal" -> "`bar`, `baz`"), + "" ) == "[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column or function parameter with " + "name `foo` cannot be resolved. Did you mean one of the following? 
[`bar`, `baz`]" @@ -199,7 +213,7 @@ class SparkThrowableSuite extends SparkFunSuite { try { throw new SparkException( errorClass = "WRITING_JOB_ABORTED", - messageParameters = Array.empty, + messageParameters = Map.empty, cause = null) } catch { case e: SparkThrowable => @@ -215,7 +229,7 @@ class SparkThrowableSuite extends SparkFunSuite { try { throw new SparkException( errorClass = "INTERNAL_ERROR", - messageParameters = Array("this is an internal error"), + messageParameters = Map("message" -> "this is an internal error"), cause = null ) } catch { @@ -240,7 +254,7 @@ class SparkThrowableSuite extends SparkFunSuite { val e = new SparkArithmeticException( errorClass = "DIVIDE_BY_ZERO", errorSubClass = None, - messageParameters = Array("CONFIG"), + messageParameters = Map("config" -> "CONFIG"), context = Array(new TestQueryContext), summary = "Query summary") @@ -283,7 +297,7 @@ class SparkThrowableSuite extends SparkFunSuite { val e2 = new SparkIllegalArgumentException( errorClass = "UNSUPPORTED_SAVE_MODE", errorSubClass = Some("EXISTENT_PATH"), - messageParameters = Array("UNSUPPORTED_MODE")) + messageParameters = Map("saveMode" -> "UNSUPPORTED_MODE")) assert(SparkThrowableHelper.getMessage(e2, STANDARD) === """{ | "errorClass" : "UNSUPPORTED_SAVE_MODE", diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/ShuffleExternalSorterSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/ShuffleExternalSorterSuite.scala index a810688a50320..5b6fb31d598ac 100644 --- a/core/src/test/scala/org/apache/spark/shuffle/sort/ShuffleExternalSorterSuite.scala +++ b/core/src/test/scala/org/apache/spark/shuffle/sort/ShuffleExternalSorterSuite.scala @@ -107,12 +107,11 @@ class ShuffleExternalSorterSuite extends SparkFunSuite with LocalSparkContext wi // at org.apache.spark.memory.TaskMemoryManager.getPage(TaskMemoryManager.java:384) // - java.lang.UnsupportedOperationException: Cannot grow BufferHolder by size -536870912 // because the size after growing exceeds size limitation 2147483632 - val e = intercept[SparkOutOfMemoryError] { - sorter.insertRecord(bytes, Platform.BYTE_ARRAY_OFFSET, 1, 0) - } - assert(e.getMessage == - "[UNABLE_TO_ACQUIRE_MEMORY] Unable to acquire 800 bytes of memory, got 400") - assert(e.getErrorClass == "UNABLE_TO_ACQUIRE_MEMORY") - assert(e.getSqlState == null) + checkError( + exception = intercept[SparkOutOfMemoryError] { + sorter.insertRecord(bytes, Platform.BYTE_ARRAY_OFFSET, 1, 0) + }, + errorClass = "UNABLE_TO_ACQUIRE_MEMORY", + parameters = Map("requestedBytes" -> "800", "receivedBytes" -> "400")) } } diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala index e77f65fb7d6d3..4a3a33b51f25f 100644 --- a/project/MimaExcludes.scala +++ b/project/MimaExcludes.scala @@ -89,7 +89,14 @@ object MimaExcludes { // [SPARK-40324][SQL] Provide query context in AnalysisException ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.AnalysisException.copy"), - ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.AnalysisException.withPosition") + ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.AnalysisException.withPosition"), + + // [SPARK-40400][SQL] Pass error message parameters to exceptions as a map + ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.AnalysisException.messageParameters"), + ProblemFilters.exclude[IncompatibleResultTypeProblem]("org.apache.spark.sql.AnalysisException.copy$default$7"), + 
ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.AnalysisException.copy"), + ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.sql.AnalysisException.this"), + ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.SparkException.this") ) // Defulat exclude rules diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala index bb2ed251f3efd..a2c72d7173c48 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala @@ -37,28 +37,33 @@ class AnalysisException protected[sql] ( val cause: Option[Throwable] = None, val errorClass: Option[String] = None, val errorSubClass: Option[String] = None, - val messageParameters: Array[String] = Array.empty, + val messageParameters: Map[String, String] = Map.empty, val context: Array[QueryContext] = Array.empty) extends Exception(message, cause.orNull) with SparkThrowable with Serializable { // Needed for binary compatibility - protected[sql] def this(message: String, - line: Option[Int], - startPosition: Option[Int], - plan: Option[LogicalPlan], - cause: Option[Throwable], - errorClass: Option[String], - messageParameters: Array[String]) = - this(message = message, - line = line, - startPosition = startPosition, - plan = plan, - cause = cause, - errorClass, - errorSubClass = None, - messageParameters = messageParameters) - - def this(errorClass: String, messageParameters: Array[String], cause: Option[Throwable]) = + protected[sql] def this( + message: String, + line: Option[Int], + startPosition: Option[Int], + plan: Option[LogicalPlan], + cause: Option[Throwable], + errorClass: Option[String], + messageParameters: Map[String, String]) = + this( + message = message, + line = line, + startPosition = startPosition, + plan = plan, + cause = cause, + errorClass, + errorSubClass = None, + messageParameters = messageParameters) + + def this( + errorClass: String, + messageParameters: Map[String, String], + cause: Option[Throwable]) = this( SparkThrowableHelper.getMessage(errorClass, null, messageParameters), errorClass = Some(errorClass), @@ -68,7 +73,7 @@ class AnalysisException protected[sql] ( def this( errorClass: String, - messageParameters: Array[String], + messageParameters: Map[String, String], context: Array[QueryContext], summary: String) = this( @@ -79,12 +84,17 @@ class AnalysisException protected[sql] ( cause = null, context = context) - def this(errorClass: String, messageParameters: Array[String]) = - this(errorClass = errorClass, messageParameters = messageParameters, cause = None) + def this( + errorClass: String, + messageParameters: Map[String, String]) = + this( + errorClass = errorClass, + messageParameters = messageParameters, + cause = None) def this( errorClass: String, - messageParameters: Array[String], + messageParameters: Map[String, String], origin: Origin) = this( SparkThrowableHelper.getMessage(errorClass, null, messageParameters), @@ -98,7 +108,7 @@ class AnalysisException protected[sql] ( def this( errorClass: String, errorSubClass: String, - messageParameters: Array[String]) = + messageParameters: Map[String, String]) = this( SparkThrowableHelper.getMessage(errorClass, errorSubClass, messageParameters), errorClass = Some(errorClass), @@ -108,7 +118,7 @@ class AnalysisException protected[sql] ( def this( errorClass: String, errorSubClass: String, - messageParameters: 
Array[String], + messageParameters: Map[String, String], origin: Origin) = this( SparkThrowableHelper.getMessage(errorClass, errorSubClass, messageParameters), @@ -126,7 +136,7 @@ class AnalysisException protected[sql] ( plan: Option[LogicalPlan] = this.plan, cause: Option[Throwable] = this.cause, errorClass: Option[String] = this.errorClass, - messageParameters: Array[String] = this.messageParameters, + messageParameters: Map[String, String] = this.messageParameters, context: Array[QueryContext] = Array.empty): AnalysisException = new AnalysisException(message, line, startPosition, plan, cause, errorClass, errorSubClass, messageParameters, context) @@ -155,7 +165,12 @@ class AnalysisException protected[sql] ( message } - override def getMessageParameters: Array[String] = messageParameters + override def getMessageParameters: Array[String] = { + errorClass.map { ec => + SparkThrowableHelper.getMessageParameters(ec, errorSubClass.orNull, messageParameters) + }.getOrElse(Array.empty) + } + override def getErrorClass: String = errorClass.orNull override def getErrorSubClass: String = errorSubClass.orNull override def getQueryContext: Array[QueryContext] = context diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCheckResult.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCheckResult.scala index b6448b2f73484..2041eaa9ad56b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCheckResult.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCheckResult.scala @@ -51,7 +51,7 @@ object TypeCheckResult { */ case class DataTypeMismatch( errorSubClass: String, - messageParameters: Array[String]) + messageParameters: Map[String, String]) extends TypeCheckResult { def isSuccess: Boolean = false } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala index be6dcfac269a5..4e32e506c6881 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala @@ -50,7 +50,7 @@ package object analysis { throw new AnalysisException(msg, t.origin.line, t.origin.startPosition, cause = Some(cause)) } - def failAnalysis(errorClass: String, messageParameters: Array[String]): Nothing = { + def failAnalysis(errorClass: String, messageParameters: Map[String, String]): Nothing = { throw new AnalysisException( errorClass = errorClass, messageParameters = messageParameters, @@ -61,7 +61,7 @@ package object analysis { throw new AnalysisException( errorClass = "DATATYPE_MISMATCH", errorSubClass = mismatch.errorSubClass, - messageParameters = toSQLExpr(expr) +: mismatch.messageParameters, + messageParameters = mismatch.messageParameters + ("sqlExpr" -> toSQLExpr(expr)), origin = t.origin) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InvalidUDFClassException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InvalidUDFClassException.scala index 4d83663fd31fb..6b06548e933cb 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InvalidUDFClassException.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InvalidUDFClassException.scala @@ -27,11 +27,11 @@ import org.apache.spark.sql.AnalysisException class InvalidUDFClassException private[sql]( message: String, 
errorClass: Option[String] = None, - messageParameters: Array[String] = Array.empty) + messageParameters: Map[String, String] = Map.empty) extends AnalysisException( message = message, errorClass = errorClass, messageParameters = messageParameters) { - def this(errorClass: String, messageParameters: Array[String]) = + def this(errorClass: String, messageParameters: Map[String, String]) = this( SparkThrowableHelper.getMessage(errorClass, null, messageParameters), Some(errorClass), diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala index a85e7d2efdb3b..f5eae73f94739 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala @@ -763,11 +763,15 @@ abstract class BinaryOperator extends BinaryExpression with ExpectsInputTypes wi if (!left.dataType.sameType(right.dataType)) { DataTypeMismatch( errorSubClass = "BINARY_OP_DIFF_TYPES", - messageParameters = Array(toSQLType(left.dataType), toSQLType(right.dataType))) + messageParameters = Map( + "left" -> toSQLType(left.dataType), + "right" -> toSQLType(right.dataType))) } else if (!inputType.acceptsType(left.dataType)) { DataTypeMismatch( errorSubClass = "BINARY_OP_WRONG_TYPE", - messageParameters = Array(toSQLType(inputType), toSQLType(left.dataType))) + messageParameters = Map( + "inputType" -> toSQLType(inputType), + "actualDataType" -> toSQLType(left.dataType))) } else { TypeCheckResult.TypeCheckSuccess } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala index 5eaef76a74711..4b2ee09804c52 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala @@ -238,7 +238,7 @@ class ParseException( val stop: Origin, errorClass: Option[String] = None, errorSubClass: Option[String] = None, - messageParameters: Array[String] = Array.empty, + messageParameters: Map[String, String] = Map.empty, queryContext: Array[QueryContext] = ParseException.getQueryContext()) extends AnalysisException( message, @@ -257,7 +257,7 @@ class ParseException( ParserUtils.position(ctx.getStop)) } - def this(errorClass: String, messageParameters: Array[String], ctx: ParserRuleContext) = + def this(errorClass: String, messageParameters: Map[String, String], ctx: ParserRuleContext) = this(Option(ParserUtils.command(ctx)), SparkThrowableHelper.getMessage(errorClass, null, messageParameters), ParserUtils.position(ctx.getStart), @@ -266,11 +266,13 @@ class ParseException( None, messageParameters) - def this(errorClass: String, - errorSubClass: String, - messageParameters: Array[String], - ctx: ParserRuleContext) = - this(Option(ParserUtils.command(ctx)), + def this( + errorClass: String, + errorSubClass: String, + messageParameters: Map[String, String], + ctx: ParserRuleContext) = + this( + Option(ParserUtils.command(ctx)), SparkThrowableHelper.getMessage(errorClass, errorSubClass, messageParameters), ParserUtils.position(ctx.getStart), ParserUtils.position(ctx.getStop), @@ -284,7 +286,7 @@ class ParseException( start: Origin, stop: Origin, errorClass: String, - messageParameters: Array[String]) = + messageParameters: Map[String, String]) = this( command, 
SparkThrowableHelper.getMessage(errorClass, null, messageParameters), @@ -319,7 +321,7 @@ class ParseException( val (cls, subCls, params) = if (errorClass == Some("PARSE_SYNTAX_ERROR") && cmd.trim().isEmpty) { // PARSE_EMPTY_STATEMENT error class overrides the PARSE_SYNTAX_ERROR when cmd is empty - (Some("PARSE_EMPTY_STATEMENT"), None, Array[String]()) + (Some("PARSE_EMPTY_STATEMENT"), None, Map.empty[String, String]) } else { (errorClass, errorSubClass, messageParameters) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/SparkParserErrorStrategy.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/SparkParserErrorStrategy.scala index 1b0b68620737f..9cc8fa8dcf850 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/SparkParserErrorStrategy.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/SparkParserErrorStrategy.scala @@ -30,14 +30,14 @@ class SparkRecognitionException( input: IntStream, ctx: ParserRuleContext, val errorClass: Option[String] = None, - val messageParameters: Array[String] = Array.empty) + val messageParameters: Map[String, String] = Map.empty) extends RecognitionException(message, recognizer, input, ctx) { /** Construct from a given [[RecognitionException]], with additional error information. */ def this( recognitionException: RecognitionException, errorClass: String, - messageParameters: Array[String]) = + messageParameters: Map[String, String]) = this( recognitionException.getMessage, recognitionException.getRecognizer, @@ -50,7 +50,7 @@ class SparkRecognitionException( messageParameters) /** Construct with pure errorClass and messageParameter information. */ - def this(errorClass: String, messageParameters: Array[String]) = + def this(errorClass: String, messageParameters: Map[String, String]) = this("", null, null, null, Some(errorClass), messageParameters) } @@ -75,7 +75,9 @@ class SparkParserErrorStrategy() extends DefaultErrorStrategy { val exceptionWithErrorClass = new SparkRecognitionException( e, "PARSE_SYNTAX_ERROR", - Array(getTokenErrorDisplay(e.getOffendingToken), "")) + messageParameters = Map( + "error" -> getTokenErrorDisplay(e.getOffendingToken), + "hint" -> "")) recognizer.notifyErrorListeners(e.getOffendingToken, "", exceptionWithErrorClass) } @@ -83,7 +85,7 @@ class SparkParserErrorStrategy() extends DefaultErrorStrategy { val exceptionWithErrorClass = new SparkRecognitionException( e, "PARSE_SYNTAX_ERROR", - Array(getTokenErrorDisplay(e.getOffendingToken), "")) + Map("error" -> getTokenErrorDisplay(e.getOffendingToken), "hint" -> "")) recognizer.notifyErrorListeners(e.getOffendingToken, "", exceptionWithErrorClass) } @@ -94,7 +96,7 @@ class SparkParserErrorStrategy() extends DefaultErrorStrategy { val hint = ": extra input " + errorTokenDisplay val exceptionWithErrorClass = new SparkRecognitionException( "PARSE_SYNTAX_ERROR", - Array(errorTokenDisplay, hint)) + Map("error" -> errorTokenDisplay, "hint" -> hint)) recognizer.notifyErrorListeners(recognizer.getCurrentToken, "", exceptionWithErrorClass) } } @@ -105,7 +107,7 @@ class SparkParserErrorStrategy() extends DefaultErrorStrategy { val hint = ": missing " + getExpectedTokens(recognizer).toString(recognizer.getVocabulary) val exceptionWithErrorClass = new SparkRecognitionException( "PARSE_SYNTAX_ERROR", - Array(getTokenErrorDisplay(recognizer.getCurrentToken), hint)) + Map("error" -> getTokenErrorDisplay(recognizer.getCurrentToken), "hint" -> hint)) 
recognizer.notifyErrorListeners(recognizer.getCurrentToken, "", exceptionWithErrorClass) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala index b62ae2a5900c9..c1d8f0a4a8a51 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala @@ -29,7 +29,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeReference, AttributeSet, CreateMap, Expression, GroupingID, NamedExpression, SpecifiedWindowFrame, WindowFrame, WindowFunction, WindowSpecDefinition} import org.apache.spark.sql.catalyst.plans.JoinType import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoStatement, Join, LogicalPlan, SerdeInfo, Window} -import org.apache.spark.sql.catalyst.trees.{Origin, SQLQueryContext, TreeNode} +import org.apache.spark.sql.catalyst.trees.{Origin, TreeNode} import org.apache.spark.sql.catalyst.util.{FailFastMode, ParseMode, PermissiveMode} import org.apache.spark.sql.connector.catalog._ import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ @@ -51,51 +51,59 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def groupingIDMismatchError(groupingID: GroupingID, groupByExprs: Seq[Expression]): Throwable = { new AnalysisException( errorClass = "GROUPING_ID_COLUMN_MISMATCH", - messageParameters = Array(groupingID.groupByExprs.mkString(","), groupByExprs.mkString(","))) + messageParameters = Map( + "groupingIdColumn" -> groupingID.groupByExprs.mkString(","), + "groupByColumns" -> groupByExprs.mkString(","))) } def groupingColInvalidError(groupingCol: Expression, groupByExprs: Seq[Expression]): Throwable = { new AnalysisException( errorClass = "GROUPING_COLUMN_MISMATCH", - messageParameters = Array(groupingCol.toString, groupByExprs.mkString(","))) + messageParameters = Map( + "grouping" -> groupingCol.toString, + "groupingColumns" -> groupByExprs.mkString(","))) } def groupingSizeTooLargeError(sizeLimit: Int): Throwable = { new AnalysisException( errorClass = "GROUPING_SIZE_LIMIT_EXCEEDED", - messageParameters = Array(sizeLimit.toString)) + messageParameters = Map("maxSize" -> sizeLimit.toString)) } def zeroArgumentIndexError(): Throwable = { new AnalysisException( errorClass = "INVALID_PARAMETER_VALUE", - messageParameters = Array( - "strfmt", toSQLId("format_string"), "expects %1$, %2$ and so on, but got %0$.")) + messageParameters = Map( + "parameter" -> "strfmt", + "functionName" -> toSQLId("format_string"), + "expected" -> "expects %1$, %2$ and so on, but got %0$.")) } def unorderablePivotColError(pivotCol: Expression): Throwable = { new AnalysisException( errorClass = "INCOMPARABLE_PIVOT_COLUMN", - messageParameters = Array(toSQLId(pivotCol.sql))) + messageParameters = Map("columnName" -> toSQLId(pivotCol.sql))) } def nonLiteralPivotValError(pivotVal: Expression): Throwable = { new AnalysisException( errorClass = "NON_LITERAL_PIVOT_VALUES", - messageParameters = Array(toSQLExpr(pivotVal))) + messageParameters = Map("expression" -> toSQLExpr(pivotVal))) } def pivotValDataTypeMismatchError(pivotVal: Expression, pivotCol: Expression): Throwable = { new AnalysisException( errorClass = "PIVOT_VALUE_DATA_TYPE_MISMATCH", - messageParameters = Array( - pivotVal.toString, pivotVal.dataType.simpleString, 
pivotCol.dataType.catalogString)) + messageParameters = Map( + "value" -> pivotVal.toString, + "valueType" -> pivotVal.dataType.simpleString, + "pivotType" -> pivotCol.dataType.catalogString)) } def unpivotRequiresValueColumns(): Throwable = { new AnalysisException( errorClass = "UNPIVOT_REQUIRES_VALUE_COLUMNS", - messageParameters = Array.empty) + messageParameters = Map.empty) } def unpivotValDataTypeMismatchError(values: Seq[NamedExpression]): Throwable = { @@ -108,45 +116,49 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { new AnalysisException( errorClass = "UNPIVOT_VALUE_DATA_TYPE_MISMATCH", - messageParameters = Array(dataTypes.mkString(", "))) + messageParameters = Map("types" -> dataTypes.mkString(", "))) } def unsupportedIfNotExistsError(tableName: String): Throwable = { new AnalysisException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "INSERT_PARTITION_SPEC_IF_NOT_EXISTS", - messageParameters = Array(toSQLId(tableName))) + messageParameters = Map("tableName" -> toSQLId(tableName))) } def nonPartitionColError(partitionName: String): Throwable = { new AnalysisException( errorClass = "NON_PARTITION_COLUMN", - messageParameters = Array(toSQLId(partitionName))) + messageParameters = Map("columnName" -> toSQLId(partitionName))) } def missingStaticPartitionColumn(staticName: String): Throwable = { new AnalysisException( errorClass = "MISSING_STATIC_PARTITION_COLUMN", - messageParameters = Array(staticName)) + messageParameters = Map("columnName" -> staticName)) } def nestedGeneratorError(trimmedNestedGenerator: Expression): Throwable = { new AnalysisException(errorClass = "UNSUPPORTED_GENERATOR", errorSubClass = "NESTED_IN_EXPRESSIONS", - messageParameters = Array(toSQLExpr(trimmedNestedGenerator))) + messageParameters = Map("expression" -> toSQLExpr(trimmedNestedGenerator))) } def moreThanOneGeneratorError(generators: Seq[Expression], clause: String): Throwable = { - new AnalysisException(errorClass = "UNSUPPORTED_GENERATOR", + new AnalysisException( + errorClass = "UNSUPPORTED_GENERATOR", errorSubClass = "MULTI_GENERATOR", - messageParameters = Array(clause, - generators.size.toString, generators.map(toSQLExpr).mkString(", "))) + messageParameters = Map( + "clause" -> clause, + "num" -> generators.size.toString, + "generators" -> generators.map(toSQLExpr).mkString(", "))) } def generatorOutsideSelectError(plan: LogicalPlan): Throwable = { - new AnalysisException(errorClass = "UNSUPPORTED_GENERATOR", + new AnalysisException( + errorClass = "UNSUPPORTED_GENERATOR", errorSubClass = "OUTSIDE_SELECT", - messageParameters = Array(plan.simpleString(SQLConf.get.maxToStringFields))) + messageParameters = Map("plan" -> plan.simpleString(SQLConf.get.maxToStringFields))) } def legacyStoreAssignmentPolicyError(): Throwable = { @@ -168,43 +180,49 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { colName: String, candidates: Seq[String], origin: Origin): Throwable = { + val commonParam = Map("objectName" -> toSQLId(colName)) + val proposalParam = if (candidates.isEmpty) { + Map.empty[String, String] + } else { + Map("proposal" -> candidates.take(5).map(toSQLId).mkString(", ")) + } new AnalysisException( errorClass = errorClass, errorSubClass = if (candidates.isEmpty) "WITHOUT_SUGGESTION" else "WITH_SUGGESTION", - messageParameters = Array.concat(Array(toSQLId(colName)), if (candidates.isEmpty) { - Array.empty - } else { - Array(candidates.take(5).map(toSQLId).mkString(", ")) - }), + messageParameters = commonParam ++ proposalParam, origin = 
origin ) } def unresolvedColumnError(columnName: String, proposal: Seq[String]): Throwable = { + val commonParam = Map("objectName" -> toSQLId(columnName)) + val proposalParam = if (proposal.isEmpty) { + Map.empty[String, String] + } else { + Map("proposal" -> proposal.take(5).map(toSQLId).mkString(", ")) + } new AnalysisException( errorClass = "UNRESOLVED_COLUMN", errorSubClass = if (proposal.isEmpty) "WITHOUT_SUGGESTION" else "WITH_SUGGESTION", - messageParameters = Array.concat(Array(toSQLId(columnName)), if (proposal.isEmpty) { - Array.empty - } else { - Array(proposal.take(5).map(toSQLId).mkString(", ")) - })) + messageParameters = commonParam ++ proposalParam) } def unresolvedFieldError( fieldName: String, columnPath: Seq[String], proposal: Seq[String]): Throwable = { + val commonParams = Map( + "fieldName" -> toSQLId(fieldName), + "columnPath" -> toSQLId(columnPath)) + val proposalParam = if (proposal.isEmpty) { + Map.empty[String, String] + } else { + Map("proposal" -> proposal.map(toSQLId).mkString(", ")) + } new AnalysisException( errorClass = "UNRESOLVED_FIELD", errorSubClass = if (proposal.isEmpty) "WITHOUT_SUGGESTION" else "WITH_SUGGESTION", - messageParameters = - Array.concat(Array(toSQLId(fieldName), toSQLId(columnPath)), if (proposal.isEmpty) { - Array.empty - } else { - Array(proposal.map(toSQLId).mkString(", ")) - }) - ) + messageParameters = commonParams ++ proposalParam) } def dataTypeMismatchForDeserializerError( @@ -212,8 +230,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { new AnalysisException( errorClass = "UNSUPPORTED_DESERIALIZER", errorSubClass = "DATA_TYPE_MISMATCH", - messageParameters = - Array(toSQLType(desiredType), toSQLType(dataType))) + messageParameters = Map( + "desiredType" -> toSQLType(desiredType), + "dataType" -> toSQLType(dataType))) } def fieldNumberMismatchForDeserializerError( @@ -221,22 +240,23 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { new AnalysisException( errorClass = "UNSUPPORTED_DESERIALIZER", errorSubClass = "FIELD_NUMBER_MISMATCH", - messageParameters = - Array(toSQLType(schema), (maxOrdinal + 1).toString)) + messageParameters = Map( + "schema" -> toSQLType(schema), + "ordinal" -> (maxOrdinal + 1).toString)) } def upCastFailureError( fromStr: String, from: Expression, to: DataType, walkedTypePath: Seq[String]): Throwable = { new AnalysisException( errorClass = "CANNOT_UP_CAST_DATATYPE", - messageParameters = Array( - fromStr, - toSQLType(from.dataType), - toSQLType(to), - s"The type path of the target object is:\n" + walkedTypePath.mkString("", "\n", "\n") + + messageParameters = Map( + "expression" -> fromStr, + "sourceType" -> toSQLType(from.dataType), + "targetType" -> toSQLType(to), + "details" -> (s"The type path of the target object is:\n" + + walkedTypePath.mkString("", "\n", "\n") + "You can either add an explicit cast to the input data or choose a higher precision " + - "type of the field in the target object" - ) + "type of the field in the target object")) ) } @@ -264,14 +284,14 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def groupingMustWithGroupingSetsOrCubeOrRollupError(): Throwable = { new AnalysisException( errorClass = "UNSUPPORTED_GROUPING_EXPRESSION", - messageParameters = Array.empty) + messageParameters = Map.empty) } def pandasUDFAggregateNotSupportedInPivotError(): Throwable = { new AnalysisException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "PANDAS_UDAF_IN_PIVOT", - messageParameters = Array[String]()) + 
messageParameters = Map.empty) } def aggregateExpressionRequiredForPivotError(sql: String): Throwable = { @@ -394,19 +414,25 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { expr: Expression): Throwable = { new AnalysisException( errorClass = "GROUP_BY_POS_REFERS_AGG_EXPR", - messageParameters = Array(index.toString, expr.sql)) + messageParameters = Map( + "index" -> index.toString, + "aggExpr" -> expr.sql)) } def groupByPositionRangeError(index: Int, size: Int): Throwable = { new AnalysisException( errorClass = "GROUP_BY_POS_OUT_OF_RANGE", - messageParameters = Array(index.toString, size.toString)) + messageParameters = Map( + "index" -> index.toString, + "size" -> size.toString)) } def generatorNotExpectedError(name: FunctionIdentifier, classCanonicalName: String): Throwable = { new AnalysisException(errorClass = "UNSUPPORTED_GENERATOR", errorSubClass = "NOT_GENERATOR", - messageParameters = Array(toSQLId(name.toString), classCanonicalName)) + messageParameters = Map( + "functionName" -> toSQLId(name.toString), + "classCanonicalName" -> classCanonicalName)) } def functionWithUnsupportedSyntaxError(prettyName: String, syntax: String): Throwable = { @@ -573,7 +599,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { new AnalysisException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "TABLE_OPERATION", - messageParameters = Array(toSQLId(nameParts), operation)) + messageParameters = Map( + "tableName" -> toSQLId(nameParts), + "operation" -> operation)) } def alterColumnWithV1TableCannotSpecifyNotNullError(): Throwable = { @@ -813,19 +841,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { s"The '$argName' parameter of function '$funcName' needs to be a $requiredType literal.") } - def invalidFormatInConversion( - argName: String, - funcName: String, - expected: String, - context: SQLQueryContext): Throwable = { - new AnalysisException( - errorClass = "INVALID_PARAMETER_VALUE", - messageParameters = - Array(toSQLId(argName), toSQLId(funcName), expected), - context = getQueryContext(context), - summary = getSummary(context)) - } - def invalidStringLiteralParameter( funcName: String, argName: String, @@ -863,7 +878,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def noHandlerForUDAFError(name: String): Throwable = { new InvalidUDFClassException( errorClass = "NO_HANDLER_FOR_UDAF", - messageParameters = Array(name)) + messageParameters = Map("functionName" -> name)) } def batchWriteCapabilityError( @@ -1440,7 +1455,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { groupAggPandasUDFNames: Seq[String]): Throwable = { new AnalysisException( errorClass = "INVALID_PANDAS_UDF_PLACEMENT", - messageParameters = Array(groupAggPandasUDFNames.map(toSQLId).mkString(", "))) + messageParameters = Map( + "functionList" -> groupAggPandasUDFNames.map(toSQLId).mkString(", "))) } def ambiguousAttributesInSelfJoinError( @@ -1461,7 +1477,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { name: Seq[String], numMatches: Int, context: Origin): Throwable = { new AnalysisException( errorClass = "AMBIGUOUS_COLUMN_OR_FIELD", - messageParameters = Array(toSQLId(name), numMatches.toString), + messageParameters = Map( + "name" -> toSQLId(name), + "n" -> numMatches.toString), origin = context) } @@ -1469,7 +1487,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { name: Seq[String], numMatches: Int): Throwable = { new AnalysisException( errorClass = 
"AMBIGUOUS_COLUMN_OR_FIELD", - messageParameters = Array(toSQLId(name), numMatches.toString)) + messageParameters = Map( + "name" -> toSQLId(name), + "n" -> numMatches.toString)) } def cannotUseIntervalTypeInTableSchemaError(): Throwable = { @@ -1610,7 +1630,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { // The second argument of {function} function needs to be an integer new AnalysisException( errorClass = "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER", - messageParameters = Array(function), + messageParameters = Map("functionName" -> function), cause = Some(e)) } @@ -1663,7 +1683,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { new AnalysisException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "PYTHON_UDF_IN_ON_CLAUSE", - messageParameters = Array(s"${toSQLStmt(joinType.sql)}")) + messageParameters = Map("joinType" -> toSQLStmt(joinType.sql))) } def conflictingAttributesInJoinConditionError( @@ -2019,15 +2039,19 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def descPartitionNotAllowedOnTempView(table: String): Throwable = { new AnalysisException( errorClass = "FORBIDDEN_OPERATION", - messageParameters = - Array(toSQLStmt("DESC PARTITION"), "TEMPORARY VIEW", toSQLId(table))) + messageParameters = Map( + "statement" -> toSQLStmt("DESC PARTITION"), + "objectType" -> "TEMPORARY VIEW", + "objectName" -> toSQLId(table))) } def descPartitionNotAllowedOnView(table: String): Throwable = { new AnalysisException( errorClass = "FORBIDDEN_OPERATION", - messageParameters = Array( - toSQLStmt("DESC PARTITION"), "VIEW", toSQLId(table))) + messageParameters = Map( + "statement" -> toSQLStmt("DESC PARTITION"), + "objectType" -> "VIEW", + "objectName" -> toSQLId(table))) } def showPartitionNotAllowedOnTableNotPartitionedError(tableIdentWithDB: String): Throwable = { @@ -2348,7 +2372,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def usingUntypedScalaUDFError(): Throwable = { new AnalysisException( errorClass = "UNTYPED_SCALA_UDF", - messageParameters = Array.empty) + messageParameters = Map.empty) } def aggregationFunctionAppliedOnNonNumericColumnError(colName: String): Throwable = { @@ -2386,20 +2410,20 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def udfClassDoesNotImplementAnyUDFInterfaceError(className: String): Throwable = { new AnalysisException( errorClass = "NO_UDF_INTERFACE_ERROR", - messageParameters = Array(className)) + messageParameters = Map("className" -> className)) } def udfClassImplementMultiUDFInterfacesError(className: String): Throwable = { new AnalysisException( errorClass = "MULTI_UDF_INTERFACE_ERROR", - messageParameters = Array(className)) + messageParameters = Map("className" -> className)) } def udfClassWithTooManyTypeArgumentsError(n: Int): Throwable = { new AnalysisException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "TOO_MANY_TYPE_ARGUMENTS_FOR_UDF_CLASS", - messageParameters = Array(s"$n")) + messageParameters = Map("num" -> s"$n")) } def classWithoutPublicNonArgumentConstructorError(className: String): Throwable = { @@ -2428,14 +2452,16 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def invalidFieldName(fieldName: Seq[String], path: Seq[String], context: Origin): Throwable = { new AnalysisException( errorClass = "INVALID_FIELD_NAME", - messageParameters = Array(toSQLId(fieldName), toSQLId(path)), + messageParameters = Map( + "fieldName" -> toSQLId(fieldName), + "path" -> toSQLId(path)), origin = context) } def 
invalidJsonSchema(schema: DataType): Throwable = { new AnalysisException( errorClass = "INVALID_JSON_SCHEMA_MAP_TYPE", - messageParameters = Array(toSQLType(schema))) + messageParameters = Map("jsonSchema" -> toSQLType(schema))) } def tableIndexNotSupportedError(errorMessage: String): Throwable = { @@ -2537,13 +2563,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def nullableColumnOrFieldError(name: Seq[String]): Throwable = { new AnalysisException( errorClass = "NULLABLE_COLUMN_OR_FIELD", - messageParameters = Array(toSQLId(name))) + messageParameters = Map("name" -> toSQLId(name))) } def nullableArrayOrMapElementError(path: Seq[String]): Throwable = { new AnalysisException( errorClass = "NULLABLE_ARRAY_OR_MAP_ELEMENT", - messageParameters = Array(toSQLId(path))) + messageParameters = Map("columnPath" -> toSQLId(path))) } def invalidColumnOrFieldDataTypeError( @@ -2552,13 +2578,16 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { expected: DataType): Throwable = { new AnalysisException( errorClass = "INVALID_COLUMN_OR_FIELD_DATA_TYPE", - messageParameters = Array(toSQLId(name), toSQLType(dt), toSQLType(expected))) + messageParameters = Map( + "name" -> toSQLId(name), + "type" -> toSQLType(dt), + "expectedType" -> toSQLType(expected))) } def columnNotInGroupByClauseError(expression: Expression): Throwable = { new AnalysisException( errorClass = "COLUMN_NOT_IN_GROUP_BY_CLAUSE", - messageParameters = Array(toSQLExpr(expression)) + messageParameters = Map("expression" -> toSQLExpr(expression)) ) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 662a10cf3acf9..1443e771efa48 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -66,28 +66,25 @@ import org.apache.spark.util.CircularBuffer private[sql] object QueryExecutionErrors extends QueryErrorsBase { def cannotEvaluateExpressionError(expression: Expression): Throwable = { - new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR", - messageParameters = Array(s"Cannot evaluate expression: $expression")) + SparkException.internalError(s"Cannot evaluate expression: $expression") } def cannotGenerateCodeForExpressionError(expression: Expression): Throwable = { - new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR", - messageParameters = Array(s"Cannot generate code for expression: $expression")) + SparkException.internalError(s"Cannot generate code for expression: $expression") } def cannotTerminateGeneratorError(generator: UnresolvedGenerator): Throwable = { - new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR", - messageParameters = Array(s"Cannot terminate expression: $generator")) + SparkException.internalError(s"Cannot terminate expression: $generator") } def castingCauseOverflowError(t: Any, from: DataType, to: DataType): ArithmeticException = { new SparkArithmeticException( errorClass = "CAST_OVERFLOW", - messageParameters = Array( - toSQLValue(t, from), - toSQLType(from), - toSQLType(to), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "value" -> toSQLValue(t, from), + "sourceType" -> toSQLType(from), + "targetType" -> toSQLType(to), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = Array.empty, summary = "") } @@ -98,10 +95,10 @@ 
private[sql] object QueryExecutionErrors extends QueryErrorsBase { columnName: String): ArithmeticException = { new SparkArithmeticException( errorClass = "CAST_OVERFLOW_IN_TABLE_INSERT", - messageParameters = Array( - toSQLType(from), - toSQLType(to), - toSQLId(columnName)), + messageParameters = Map( + "sourceType" -> toSQLType(from), + "targetType" -> toSQLType(to), + "columnName" -> toSQLId(columnName)), context = Array.empty, summary = "" ) @@ -114,11 +111,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext = null): ArithmeticException = { new SparkArithmeticException( errorClass = "NUMERIC_VALUE_OUT_OF_RANGE", - messageParameters = Array( - value.toPlainString, - decimalPrecision.toString, - decimalScale.toString, - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "value" -> value.toPlainString, + "precision" -> decimalPrecision.toString, + "scale" -> decimalScale.toString, + "config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -130,11 +127,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext): Throwable = { new SparkDateTimeException( errorClass = "CAST_INVALID_INPUT", - messageParameters = Array( - toSQLValue(value, from), - toSQLType(from), - toSQLType(to), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "expression" -> toSQLValue(value, from), + "sourceType" -> toSQLType(from), + "targetType" -> toSQLType(to), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -144,11 +141,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext): SparkRuntimeException = { new SparkRuntimeException( errorClass = "CAST_INVALID_INPUT", - messageParameters = Array( - toSQLValue(s, StringType), - toSQLType(StringType), - toSQLType(BooleanType), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "expression" -> toSQLValue(s, StringType), + "sourceType" -> toSQLType(StringType), + "targetType" -> toSQLType(BooleanType), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -159,11 +156,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext): SparkNumberFormatException = { new SparkNumberFormatException( errorClass = "CAST_INVALID_INPUT", - messageParameters = Array( - toSQLValue(s, StringType), - toSQLType(StringType), - toSQLType(to), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "expression" -> toSQLValue(s, StringType), + "sourceType" -> toSQLType(StringType), + "targetType" -> toSQLType(to), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -175,44 +172,58 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { hint: String): SparkIllegalArgumentException = { new SparkIllegalArgumentException( errorClass = "CONVERSION_INVALID_INPUT", - messageParameters = Array( - toSQLValue(s, StringType), - toSQLValue(fmt, StringType), - toSQLType(to), - toSQLId(hint))) + messageParameters = Map( + "str" -> toSQLValue(s, StringType), + "fmt" -> toSQLValue(fmt, StringType), + "targetType" -> toSQLType(to), + "suggestion" -> toSQLId(hint))) } def cannotCastFromNullTypeError(to: DataType): Throwable = { - new SparkException(errorClass = "CANNOT_CAST_DATATYPE", 
- messageParameters = Array(NullType.typeName, to.typeName), null) + new SparkException( + errorClass = "CANNOT_CAST_DATATYPE", + messageParameters = Map( + "sourceType" -> NullType.typeName, + "targetType" -> to.typeName), + cause = null) } def cannotCastError(from: DataType, to: DataType): Throwable = { - new SparkException(errorClass = "CANNOT_CAST_DATATYPE", - messageParameters = Array(from.typeName, to.typeName), null) + new SparkException( + errorClass = "CANNOT_CAST_DATATYPE", + messageParameters = Map( + "sourceType" -> from.typeName, + "targetType" -> to.typeName), + cause = null) } def cannotParseDecimalError(): Throwable = { new SparkRuntimeException( errorClass = "CANNOT_PARSE_DECIMAL", - messageParameters = Array.empty) + messageParameters = Map.empty) } def dataTypeUnsupportedError(dataType: String, failure: String): Throwable = { - new SparkIllegalArgumentException(errorClass = "UNSUPPORTED_DATATYPE", - messageParameters = Array(dataType + failure)) + new SparkIllegalArgumentException( + errorClass = "UNSUPPORTED_DATATYPE", + messageParameters = Map("typeName" -> (dataType + failure))) } def failedExecuteUserDefinedFunctionError(funcCls: String, inputTypes: String, outputType: String, e: Throwable): Throwable = { - new SparkException(errorClass = "FAILED_EXECUTE_UDF", - messageParameters = Array(funcCls, inputTypes, outputType), e) + new SparkException( + errorClass = "FAILED_EXECUTE_UDF", + messageParameters = Map( + "functionName" -> funcCls, + "signature" -> inputTypes, + "result" -> outputType), + cause = e) } def divideByZeroError(context: SQLQueryContext): ArithmeticException = { new SparkArithmeticException( errorClass = "DIVIDE_BY_ZERO", - messageParameters = Array(toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map("config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -220,7 +231,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def intervalDividedByZeroError(context: SQLQueryContext): ArithmeticException = { new SparkArithmeticException( errorClass = "INTERVAL_DIVIDED_BY_ZERO", - messageParameters = Array.empty, + messageParameters = Map.empty, context = getQueryContext(context), summary = getSummary(context)) } @@ -231,10 +242,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext): ArrayIndexOutOfBoundsException = { new SparkArrayIndexOutOfBoundsException( errorClass = "INVALID_ARRAY_INDEX", - messageParameters = Array( - toSQLValue(index, IntegerType), - toSQLValue(numElements, IntegerType), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "indexValue" -> toSQLValue(index, IntegerType), + "arraySize" -> toSQLValue(numElements, IntegerType), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -245,11 +256,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { context: SQLQueryContext): ArrayIndexOutOfBoundsException = { new SparkArrayIndexOutOfBoundsException( errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT", - messageParameters = - Array( - toSQLValue(index, IntegerType), - toSQLValue(numElements, IntegerType), - toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "indexValue" -> toSQLValue(index, IntegerType), + "arraySize" -> toSQLValue(numElements, IntegerType), + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ 
-258,7 +268,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkDateTimeException( errorClass = "INVALID_FRACTION_OF_SECOND", errorSubClass = None, - Array(toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key) + ), context = Array.empty, summary = "") } @@ -267,7 +279,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkDateTimeException( errorClass = "CANNOT_PARSE_TIMESTAMP", errorSubClass = None, - Array(e.getMessage, toSQLConf(SQLConf.ANSI_ENABLED.key)), + messageParameters = Map( + "message" -> e.getMessage, + "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = Array.empty, summary = "") } @@ -306,14 +320,18 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkRuntimeException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "LITERAL_TYPE", - messageParameters = Array( s"${v.toString}", s"${v.getClass.toString}")) + messageParameters = Map( + "value" -> v.toString, + "type" -> v.getClass.toString)) } def pivotColumnUnsupportedError(v: Any, dataType: DataType): RuntimeException = { new SparkRuntimeException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "PIVOT_TYPE", - messageParameters = Array(s"${v.toString}", s"${toSQLType(dataType)}")) + messageParameters = Map( + "value" -> v.toString, + "type" -> toSQLType(dataType))) } def noDefaultForDataTypeError(dataType: DataType): RuntimeException = { @@ -340,8 +358,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def illegalUrlError(url: UTF8String): Throwable = { - new SparkIllegalArgumentException(errorClass = "CANNOT_DECODE_URL", - messageParameters = Array(url.toString) + new SparkIllegalArgumentException( + errorClass = "CANNOT_DECODE_URL", + messageParameters = Map("url" -> url.toString) ) } @@ -394,9 +413,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def methodNotDeclaredError(name: String): Throwable = { - new SparkNoSuchMethodException(errorClass = "INTERNAL_ERROR", - messageParameters = Array( - s"""A method named "$name" is not declared in any enclosing class nor any supertype""")) + SparkException.internalError( + s"""A method named "$name" is not declared in any enclosing class nor any supertype""") } def constructorNotFoundError(cls: String): Throwable = { @@ -516,7 +534,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } else "" new SparkArithmeticException( errorClass = "ARITHMETIC_OVERFLOW", - messageParameters = Array(message, alternative, SQLConf.ANSI_ENABLED.key), + messageParameters = Map( + "message" -> message, + "alternative" -> alternative, + "config" -> toSQLConf(SQLConf.ANSI_ENABLED.key)), context = getQueryContext(context), summary = getSummary(context)) } @@ -540,7 +561,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } else "" new SparkArithmeticException( errorClass = "INTERVAL_ARITHMETIC_OVERFLOW", - messageParameters = Array(message, alternative), + messageParameters = Map( + "message" -> message, + "alternative" -> alternative), context = getQueryContext(context), summary = getSummary(context)) } @@ -579,10 +602,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { """.stripMargin) } - def inferDateWithLegacyTimeParserError(): Throwable with SparkThrowable = { - new SparkIllegalArgumentException(errorClass = "CANNOT_INFER_DATE", - messageParameters = Array() - ) + def inferDateWithLegacyTimeParserError(): Throwable = { + new 
SparkIllegalArgumentException( + errorClass = "CANNOT_INFER_DATE", + messageParameters = Map.empty) } def streamedOperatorUnsupportedByDataSourceError( @@ -610,8 +633,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def incompatibleDataSourceRegisterError(e: Throwable): Throwable = { - new SparkClassNotFoundException("INCOMPATIBLE_DATASOURCE_REGISTER", None, - Array(e.getMessage), e) + new SparkClassNotFoundException( + errorClass = "INCOMPATIBLE_DATASOURCE_REGISTER", + errorSubClass = None, + messageParameters = Map("message" -> e.getMessage), + cause = e) } def sparkUpgradeInReadingDatesError( @@ -619,12 +645,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkUpgradeException( errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION", errorSubClass = Some("READ_ANCIENT_DATETIME"), - messageParameters = Array( - format, - toSQLConf(config), - toDSOption(option), - toSQLConf(config), - toDSOption(option)), + messageParameters = Map( + "format" -> format, + "config" -> toSQLConf(config), + "option" -> toDSOption(option)), cause = null ) } @@ -633,10 +657,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkUpgradeException( errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION", errorSubClass = Some("WRITE_ANCIENT_DATETIME"), - messageParameters = Array( - format, - toSQLConf(config), - toSQLConf(config)), + messageParameters = Map( + "format" -> format, + "config" -> toSQLConf(config)), cause = null ) } @@ -664,14 +687,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def saveModeUnsupportedError(saveMode: Any, pathExists: Boolean): Throwable = { - pathExists match { - case true => new SparkIllegalArgumentException(errorClass = "UNSUPPORTED_SAVE_MODE", - errorSubClass = Some("EXISTENT_PATH"), - messageParameters = Array(toSQLValue(saveMode, StringType))) - case _ => new SparkIllegalArgumentException(errorClass = "UNSUPPORTED_SAVE_MODE", - errorSubClass = Some("NON_EXISTENT_PATH"), - messageParameters = Array(toSQLValue(saveMode, StringType))) - } + new SparkIllegalArgumentException( + errorClass = "UNSUPPORTED_SAVE_MODE", + errorSubClass = Some(if (pathExists) "EXISTENT_PATH" else "NON_EXISTENT_PATH"), + messageParameters = Map("saveMode" -> toSQLValue(saveMode, StringType))) } def cannotClearOutputDirectoryError(staticPrefixPath: Path): Throwable = { @@ -759,7 +778,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def writingJobAbortedError(e: Throwable): Throwable = { new SparkException( errorClass = "WRITING_JOB_ABORTED", - messageParameters = Array.empty, + messageParameters = Map.empty, cause = e) } @@ -825,7 +844,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def unrecognizedSqlTypeError(sqlType: Int): Throwable = { - new SparkSQLException(errorClass = "UNRECOGNIZED_SQL_TYPE", Array(sqlType.toString)) + new SparkSQLException( + errorClass = "UNRECOGNIZED_SQL_TYPE", + messageParameters = Map("typeName" -> sqlType.toString)) } def unsupportedJdbcTypeError(content: String): Throwable = { @@ -855,7 +876,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkSQLFeatureNotSupportedException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "JDBC_TRANSACTION", - messageParameters = Array[String]()) + messageParameters = Map.empty[String, String]) } def dataTypeUnsupportedYetError(dataType: DataType): Throwable = { @@ -1062,9 +1083,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new 
SparkUpgradeException( errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION", errorSubClass = Some("PARSE_DATETIME_BY_NEW_PARSER"), - messageParameters = Array( - toSQLValue(s, StringType), - toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)), + messageParameters = Map( + "datetime" -> toSQLValue(s, StringType), + "config" -> toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)), e) } @@ -1072,9 +1093,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkUpgradeException( errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION", errorSubClass = Some("DATETIME_PATTERN_RECOGNITION"), - messageParameters = Array( - toSQLValue(pattern, StringType), - toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)), + messageParameters = Map( + "pattern" -> toSQLValue(pattern, StringType), + "config" -> toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)), e) } @@ -1090,7 +1111,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def concurrentQueryInstanceError(): Throwable = { - new SparkConcurrentModificationException("CONCURRENT_QUERY", None, Array.empty) + new SparkConcurrentModificationException( + errorClass = "CONCURRENT_QUERY", + errorSubClass = None, + messageParameters = Map.empty[String, String]) } def cannotParseJsonArraysAsStructsError(): Throwable = { @@ -1236,7 +1260,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkRuntimeException( errorClass = "ELEMENT_AT_BY_INDEX_ZERO", cause = null, - messageParameters = Array.empty, + messageParameters = Map.empty, context = getQueryContext(context), summary = getSummary(context)) } @@ -1490,8 +1514,12 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def renamePathAsExistsPathError(srcPath: Path, dstPath: Path): Throwable = { - new SparkFileAlreadyExistsException(errorClass = "FAILED_RENAME_PATH", None, - Array(srcPath.toString, dstPath.toString)) + new SparkFileAlreadyExistsException( + errorClass = "FAILED_RENAME_PATH", + errorSubClass = None, + messageParameters = Map( + "sourcePath" -> srcPath.toString, + "targetPath" -> dstPath.toString)) } def renameAsExistsPathError(dstPath: Path): Throwable = { @@ -1499,8 +1527,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def renameSrcPathNotFoundError(srcPath: Path): Throwable = { - new SparkFileNotFoundException(errorClass = "RENAME_SRC_PATH_NOT_FOUND", None, - Array(srcPath.toString)) + new SparkFileNotFoundException( + errorClass = "RENAME_SRC_PATH_NOT_FOUND", + errorSubClass = None, + messageParameters = Map("sourcePath" -> srcPath.toString)) } def failedRenameTempFileError(srcPath: Path, dstPath: Path): Throwable = { @@ -1697,8 +1727,12 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { permission: FsPermission, path: Path, e: Throwable): Throwable = { - new SparkSecurityException(errorClass = "RESET_PERMISSION_TO_ORIGINAL", None, - Array(permission.toString, path.toString, e.getMessage)) + new SparkSecurityException( + errorClass = "RESET_PERMISSION_TO_ORIGINAL", None, + messageParameters = Map( + "permission" -> permission.toString, + "path" -> path.toString, + "message" -> e.getMessage)) } def failToSetOriginalACLBackError(aclEntries: String, path: Path, e: Throwable): Throwable = { @@ -1952,14 +1986,14 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkUnsupportedOperationException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "REPEATED_PIVOT", - messageParameters = Array[String]()) + messageParameters = Map.empty[String, String]) } def 
pivotNotAfterGroupByUnsupportedError(): Throwable = { new SparkUnsupportedOperationException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "PIVOT_AFTER_GROUP_BY", - messageParameters = Array[String]()) + messageParameters = Map.empty[String, String]) } private val aesFuncName = toSQLId("aes_encrypt") + "/" + toSQLId("aes_decrypt") @@ -1967,26 +2001,30 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def invalidAesKeyLengthError(actualLength: Int): RuntimeException = { new SparkRuntimeException( errorClass = "INVALID_PARAMETER_VALUE", - messageParameters = Array( - "key", - aesFuncName, - s"expects a binary value with 16, 24 or 32 bytes, but got ${actualLength.toString} bytes.")) + messageParameters = Map( + "parameter" -> "key", + "functionName" -> aesFuncName, + "expected" -> ("expects a binary value with 16, 24 or 32 bytes, " + + s"but got ${actualLength.toString} bytes."))) } def aesModeUnsupportedError(mode: String, padding: String): RuntimeException = { new SparkRuntimeException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "AES_MODE", - messageParameters = Array(mode, padding, aesFuncName)) + messageParameters = Map( + "mode" -> mode, + "padding" -> padding, + "functionName" -> aesFuncName)) } def aesCryptoError(detailMessage: String): RuntimeException = { new SparkRuntimeException( errorClass = "INVALID_PARAMETER_VALUE", - messageParameters = Array( - "expr, key", - aesFuncName, - s"Detail message: $detailMessage")) + messageParameters = Map( + "parameter" -> "expr, key", + "functionName" -> aesFuncName, + "expected" -> s"Detail message: $detailMessage")) } def hiveTableWithAnsiIntervalsError(tableName: String): Throwable = { @@ -1997,16 +2035,18 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { new SparkUnsupportedOperationException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "ORC_TYPE_CAST", - messageParameters = Array(toSQLType(TimestampType), - toSQLType(TimestampNTZType))) + messageParameters = Map( + "orcType" -> toSQLType(TimestampType), + "toType" -> toSQLType(TimestampNTZType))) } def cannotConvertOrcTimestampNTZToTimestampLTZError(): Throwable = { new SparkUnsupportedOperationException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "ORC_TYPE_CAST", - messageParameters = Array(toSQLType(TimestampNTZType), - toSQLType(TimestampType))) + messageParameters = Map( + "orcType" -> toSQLType(TimestampNTZType), + "toType" -> toSQLType(TimestampType))) } def writePartitionExceedConfigSizeWhenDynamicPartitionError( @@ -2044,38 +2084,42 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def timestampAddOverflowError(micros: Long, amount: Int, unit: String): ArithmeticException = { new SparkArithmeticException( errorClass = "DATETIME_OVERFLOW", - messageParameters = Array( - s"add ${toSQLValue(amount, IntegerType)} $unit to " + - s"${toSQLValue(DateTimeUtils.microsToInstant(micros), TimestampType)}"), + messageParameters = Map( + "operation" -> (s"add ${toSQLValue(amount, IntegerType)} $unit to " + + s"${toSQLValue(DateTimeUtils.microsToInstant(micros), TimestampType)}")), context = Array.empty, summary = "") } def invalidBucketFile(path: String): Throwable = { - new SparkException(errorClass = "INVALID_BUCKET_FILE", messageParameters = Array(path), + new SparkException( + errorClass = "INVALID_BUCKET_FILE", + messageParameters = Map("path" -> path), cause = null) } def multipleRowSubqueryError(context: SQLQueryContext): Throwable = { new SparkException( errorClass = "MULTI_VALUE_SUBQUERY_ERROR", - 
messageParameters = Array.empty, + messageParameters = Map.empty, cause = null, context = getQueryContext(context), summary = getSummary(context)) } def nullComparisonResultError(): Throwable = { - new SparkException(errorClass = "NULL_COMPARISON_RESULT", - messageParameters = Array(), cause = null) + new SparkException( + errorClass = "NULL_COMPARISON_RESULT", + messageParameters = Map.empty, + cause = null) } def invalidPatternError(funcName: String, pattern: String): RuntimeException = { new SparkRuntimeException( errorClass = "INVALID_PARAMETER_VALUE", - messageParameters = Array( - "regexp", - toSQLId(funcName), - pattern)) + messageParameters = Map( + "parameter" -> "regexp", + "functionName" -> toSQLId(funcName), + "expected" -> pattern)) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index d4629f0dd3fe4..51b68383cd0e4 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -79,8 +79,8 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def emptyPartitionKeyError(key: String, ctx: PartitionSpecContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = - Array(s"Partition key ${toSQLId(key)} must set value (can't be empty)."), + messageParameters = Map( + "inputString" -> s"Partition key ${toSQLId(key)} must set value (can't be empty)."), ctx) } @@ -97,7 +97,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "TRANSFORM_DISTINCT_ALL", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx) } @@ -105,7 +105,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "TRANSFORM_NON_HIVE", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx) } @@ -117,7 +117,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "LATERAL_NATURAL_JOIN", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx) } @@ -125,7 +125,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "LATERAL_JOIN_USING", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx) } @@ -133,36 +133,47 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "LATERAL_JOIN_OF_TYPE", - messageParameters = Array(s"${toSQLStmt(joinType)}"), + messageParameters = Map("joinType" -> toSQLStmt(joinType)), ctx) } def invalidLateralJoinRelationError(ctx: RelationPrimaryContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array(s"${toSQLStmt("LATERAL")} can only be used with subquery."), + messageParameters = Map( + "inputString" -> s"${toSQLStmt("LATERAL")} can only be used with subquery."), ctx) } def repetitiveWindowDefinitionError(name: String, ctx: WindowClauseContext): Throwable = { - new ParseException("INVALID_SQL_SYNTAX", - Array(s"The definition of window ${toSQLId(name)} is repetitive."), ctx) + new ParseException( + errorClass = "INVALID_SQL_SYNTAX", + 
messageParameters = Map( + "inputString" -> s"The definition of window ${toSQLId(name)} is repetitive."), + ctx) } def invalidWindowReferenceError(name: String, ctx: WindowClauseContext): Throwable = { - new ParseException("INVALID_SQL_SYNTAX", - Array(s"Window reference ${toSQLId(name)} is not a window specification."), ctx) + new ParseException( + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> s"Window reference ${toSQLId(name)} is not a window specification."), + ctx) } def cannotResolveWindowReferenceError(name: String, ctx: WindowClauseContext): Throwable = { - new ParseException("INVALID_SQL_SYNTAX", - Array(s"Cannot resolve window reference ${toSQLId(name)}."), ctx) + new ParseException( + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> s"Cannot resolve window reference ${toSQLId(name)}."), + ctx) } def naturalCrossJoinUnsupportedError(ctx: RelationContext): Throwable = { - new ParseException(errorClass = "UNSUPPORTED_FEATURE", + new ParseException( + errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "NATURAL_CROSS_JOIN", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx = ctx) } @@ -189,8 +200,11 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { } def functionNameUnsupportedError(functionName: String, ctx: ParserRuleContext): Throwable = { - new ParseException("INVALID_SQL_SYNTAX", - Array(s"Unsupported function name ${toSQLId(functionName)}"), ctx) + new ParseException( + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> s"Unsupported function name ${toSQLId(functionName)}"), + ctx) } def cannotParseValueTypeError( @@ -250,22 +264,27 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { } def charTypeMissingLengthError(dataType: String, ctx: PrimitiveDataTypeContext): Throwable = { - new ParseException("PARSE_CHAR_MISSING_LENGTH", Array(dataType, dataType), ctx) + new ParseException( + errorClass = "PARSE_CHAR_MISSING_LENGTH", + messageParameters = Map("type" -> toSQLType(dataType)), + ctx) } def partitionTransformNotExpectedError( name: String, describe: String, ctx: ApplyTransformContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = - Array(s"Expected a column reference for transform ${toSQLId(name)}: $describe"), + messageParameters = Map( + "inputString" -> + s"Expected a column reference for transform ${toSQLId(name)}: $describe"), ctx) } def tooManyArgumentsForTransformError(name: String, ctx: ApplyTransformContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array(s"Too many arguments for transform ${toSQLId(name)}"), + messageParameters = Map( + "inputString" -> s"Too many arguments for transform ${toSQLId(name)}"), ctx) } @@ -278,7 +297,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "SET_NAMESPACE_PROPERTY", - messageParameters = Array(property, msg), + messageParameters = Map("property" -> property, "msg" -> msg), ctx) } @@ -286,7 +305,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "SET_PROPERTIES_AND_DBPROPERTIES", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx ) } @@ -296,7 +315,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", 
errorSubClass = "SET_TABLE_PROPERTY", - messageParameters = Array(property, msg), + messageParameters = Map("property" -> property, "msg" -> msg), ctx) } @@ -330,7 +349,7 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { new ParseException( errorClass = "UNSUPPORTED_FEATURE", errorSubClass = "DESC_TABLE_COLUMN_PARTITION", - messageParameters = Array[String](), + messageParameters = Map.empty, ctx) } @@ -338,7 +357,8 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { key: String, ctx: DescribeRelationContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array(s"PARTITION specification is incomplete: ${toSQLId(key)}"), + messageParameters = Map( + "inputString" -> s"PARTITION specification is incomplete: ${toSQLId(key)}"), ctx) } @@ -355,17 +375,19 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def showFunctionsUnsupportedError(identifier: String, ctx: IdentifierContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array( - s"${toSQLStmt("SHOW")} ${toSQLId(identifier)} ${toSQLStmt("FUNCTIONS")} not supported"), + messageParameters = Map( + "inputString" -> + s"${toSQLStmt("SHOW")} ${toSQLId(identifier)} ${toSQLStmt("FUNCTIONS")} not supported"), ctx) } def showFunctionsInvalidPatternError(pattern: String, ctx: ParserRuleContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array( - s"Invalid pattern in ${toSQLStmt("SHOW FUNCTIONS")}: ${toSQLId(pattern)}. " + - s"It must be a ${toSQLType(StringType)} literal."), + messageParameters = Map( + "inputString" -> + (s"Invalid pattern in ${toSQLStmt("SHOW FUNCTIONS")}: ${toSQLId(pattern)}. " + + s"It must be a ${toSQLType(StringType)} literal.")), ctx) } @@ -388,7 +410,10 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def duplicateKeysError(key: String, ctx: ParserRuleContext): Throwable = { // Found duplicate keys '$key' - new ParseException(errorClass = "DUPLICATE_KEY", messageParameters = Array(toSQLId(key)), ctx) + new ParseException( + errorClass = "DUPLICATE_KEY", + messageParameters = Map("keyColumn" -> toSQLId(key)), + ctx) } def unexpectedFormatForSetConfigurationError(ctx: ParserRuleContext): Throwable = { @@ -400,17 +425,21 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def invalidPropertyKeyForSetQuotedConfigurationError( keyCandidate: String, valueStr: String, ctx: ParserRuleContext): Throwable = { - new ParseException(errorClass = "INVALID_PROPERTY_KEY", - messageParameters = Array(toSQLConf(keyCandidate), - toSQLConf(keyCandidate), toSQLConf(valueStr)), + new ParseException( + errorClass = "INVALID_PROPERTY_KEY", + messageParameters = Map( + "key" -> toSQLConf(keyCandidate), + "value" -> toSQLConf(valueStr)), ctx) } def invalidPropertyValueForSetQuotedConfigurationError( valueCandidate: String, keyStr: String, ctx: ParserRuleContext): Throwable = { - new ParseException(errorClass = "INVALID_PROPERTY_VALUE", - messageParameters = Array(toSQLConf(valueCandidate), - toSQLConf(keyStr), toSQLConf(valueCandidate)), + new ParseException( + errorClass = "INVALID_PROPERTY_VALUE", + messageParameters = Map( + "value" -> toSQLConf(valueCandidate), + "key" -> toSQLConf(keyStr)), ctx) } @@ -473,33 +502,40 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def createFuncWithBothIfNotExistsAndReplaceError(ctx: CreateFunctionContext): Throwable = { new ParseException( errorClass = 
"INVALID_SQL_SYNTAX", - messageParameters = Array( - s"${toSQLStmt("CREATE FUNCTION")} with both ${toSQLStmt("IF NOT EXISTS")} " + - s"and ${toSQLStmt("REPLACE")} is not allowed."), + messageParameters = Map( + "inputString" -> + (s"${toSQLStmt("CREATE FUNCTION")} with both ${toSQLStmt("IF NOT EXISTS")} " + + s"and ${toSQLStmt("REPLACE")} is not allowed.")), ctx) } def defineTempFuncWithIfNotExistsError(ctx: CreateFunctionContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array( - s"It is not allowed to define a ${toSQLStmt("TEMPORARY FUNCTION")}" + - s" with ${toSQLStmt("IF NOT EXISTS")}."), ctx) + messageParameters = Map( + "inputString" -> + (s"It is not allowed to define a ${toSQLStmt("TEMPORARY FUNCTION")}" + + s" with ${toSQLStmt("IF NOT EXISTS")}.")), + ctx) } def unsupportedFunctionNameError(funcName: Seq[String], ctx: CreateFunctionContext): Throwable = { - new ParseException("INVALID_SQL_SYNTAX", - Array(s"Unsupported function name ${toSQLId(funcName)}"), ctx) + new ParseException( + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> s"Unsupported function name ${toSQLId(funcName)}"), + ctx) } def specifyingDBInCreateTempFuncError( databaseName: String, ctx: CreateFunctionContext): Throwable = { new ParseException( - "INVALID_SQL_SYNTAX", - Array( - s"Specifying a database in ${toSQLStmt("CREATE TEMPORARY FUNCTION")} is not allowed: " + - toSQLId(databaseName)), + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> + (s"Specifying a database in ${toSQLStmt("CREATE TEMPORARY FUNCTION")} is not allowed: " + + toSQLId(databaseName))), ctx) } @@ -507,8 +543,11 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { name: Seq[String], ctx: TableValuedFunctionContext): Throwable = { new ParseException( - "INVALID_SQL_SYNTAX", - Array("table valued function cannot specify database name ", toSQLId(name)), ctx) + errorClass = "INVALID_SQL_SYNTAX", + messageParameters = Map( + "inputString" -> + ("table valued function cannot specify database name: " + toSQLId(name))), + ctx) } def unclosedBracketedCommentError(command: String, position: Origin): Throwable = { @@ -522,9 +561,10 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { def invalidNameForDropTempFunc(name: Seq[String], ctx: ParserRuleContext): Throwable = { new ParseException( errorClass = "INVALID_SQL_SYNTAX", - messageParameters = Array( - s"${toSQLStmt("DROP TEMPORARY FUNCTION")} requires a single part name but got: " + - toSQLId(name)), + messageParameters = Map( + "inputString" -> + (s"${toSQLStmt("DROP TEMPORARY FUNCTION")} requires a single part name but got: " + + toSQLId(name))), ctx) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala index 553f46a00c27c..29c28f85e0ee0 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala @@ -116,7 +116,7 @@ class AnalysisErrorSuite extends AnalysisTest { name: String, plan: LogicalPlan, errorClass: String, - messageParameters: Array[String]): Unit = { + messageParameters: Map[String, String]): Unit = { errorClassTest(name, plan, errorClass, null, messageParameters) } @@ -125,7 +125,7 @@ class AnalysisErrorSuite extends AnalysisTest { plan: LogicalPlan, 
errorClass: String, errorSubClass: String, - messageParameters: Array[String]): Unit = { + messageParameters: Map[String, String]): Unit = { test(name) { assertAnalysisErrorClass(plan, errorClass, errorSubClass, messageParameters, caseSensitive = true) @@ -303,7 +303,7 @@ class AnalysisErrorSuite extends AnalysisTest { testRelation.select($"abcd"), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`abcd`", "`a`")) + Map("objectName" -> "`abcd`", "proposal" -> "`a`")) errorClassTest( "unresolved attributes with a generated name", @@ -312,7 +312,7 @@ class AnalysisErrorSuite extends AnalysisTest { .orderBy($"havingCondition".asc), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`havingCondition`", "`max(b)`")) + Map("objectName" -> "`havingCondition`", "proposal" -> "`max(b)`")) errorTest( "unresolved star expansion in max", @@ -329,7 +329,7 @@ class AnalysisErrorSuite extends AnalysisTest { testRelation2.groupBy($"a", $"c")($"a", $"c", count($"a").as("a3")).orderBy($"b".asc), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`b`", "`a`, `c`, `a3`")) + Map("objectName" -> "`b`", "proposal" -> "`a`, `c`, `a3`")) errorTest( "non-boolean filters", @@ -424,7 +424,7 @@ class AnalysisErrorSuite extends AnalysisTest { testRelation2.where($"bad_column" > 1).groupBy($"a")(UnresolvedAlias(max($"b"))), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`bad_column`", "`a`, `b`, `c`, `d`, `e`")) + Map("objectName" -> "`bad_column`", "proposal" -> "`a`, `b`, `c`, `d`, `e`")) errorTest( "slide duration greater than window in time window", diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala index 1fd001c27d615..e161a61cdf92a 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala @@ -105,7 +105,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`tBl`.`a`", "`TbL`.`a`"), + Map("objectName" -> "`tBl`.`a`", "proposal" -> "`TbL`.`a`"), caseSensitive = true) checkAnalysisWithoutViewWrapper( @@ -715,7 +715,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { assertAnalysisErrorClass(parsePlan("WITH t(x) AS (SELECT 1) SELECT * FROM t WHERE y = 1"), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`y`", "`t`.`x`"), + Map("objectName" -> "`y`", "proposal" -> "`t`.`x`"), caseSensitive = true) } @@ -1155,7 +1155,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { |""".stripMargin), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`c`.`y`", "`x`"), + Map("objectName" -> "`c`.`y`", "proposal" -> "`x`"), caseSensitive = true) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala index 94cb68a26f75b..8bb0588b536e6 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala @@ -173,7 +173,7 @@ trait AnalysisTest extends PlanTest { protected def assertAnalysisErrorClass( inputPlan: LogicalPlan, expectedErrorClass: String, - expectedMessageParameters: Array[String], + expectedMessageParameters: Map[String, String], caseSensitive: Boolean = true): Unit 
= { assertAnalysisErrorClass( inputPlan, @@ -187,7 +187,7 @@ trait AnalysisTest extends PlanTest { inputPlan: LogicalPlan, expectedErrorClass: String, expectedErrorSubClass: String, - expectedMessageParameters: Array[String], + expectedMessageParameters: Map[String, String], caseSensitive: Boolean): Unit = { withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) { val analyzer = getAnalyzer diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveSubquerySuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveSubquerySuite.scala index b3a19041220b2..ab4d79593e7b6 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveSubquerySuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveSubquerySuite.scala @@ -135,7 +135,7 @@ class ResolveSubquerySuite extends AnalysisTest { lateralJoin(t1, lateralJoin(t2, t0.select($"a", $"b", $"c"))), "UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", - Array("`a`"), + Map("objectName" -> "`a`"), caseSensitive = true) } @@ -145,28 +145,28 @@ class ResolveSubquerySuite extends AnalysisTest { lateralJoin(t1, t0.select($"a", $"c")), "UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", - Array("`c`"), + Map("objectName" -> "`c`"), caseSensitive = true) // SELECT * FROM t1, LATERAL (SELECT a, b, c, d FROM t2) assertAnalysisErrorClass( lateralJoin(t1, t2.select($"a", $"b", $"c", $"d")), "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`d`", "`b`, `c`"), + Map("objectName" -> "`d`", "proposal" -> "`b`, `c`"), caseSensitive = true) // SELECT * FROM t1, LATERAL (SELECT * FROM t2, LATERAL (SELECT t1.a)) assertAnalysisErrorClass( lateralJoin(t1, lateralJoin(t2, t0.select($"t1.a"))), "UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", - Array("`t1`.`a`"), + Map("objectName" -> "`t1`.`a`"), caseSensitive = true) // SELECT * FROM t1, LATERAL (SELECT * FROM t2, LATERAL (SELECT a, b)) assertAnalysisErrorClass( lateralJoin(t1, lateralJoin(t2, t0.select($"a", $"b"))), "UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", - Array("`a`"), + Map("objectName" -> "`a`"), caseSensitive = true) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/V2WriteAnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/V2WriteAnalysisSuite.scala index e5fda40cf516c..d5c9660c77a80 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/V2WriteAnalysisSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/V2WriteAnalysisSuite.scala @@ -692,7 +692,7 @@ abstract class V2WriteAnalysisSuiteBase extends AnalysisTest { parsedPlan, "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`a`", "`x`, `y`"), + Map("objectName" -> "`a`", "proposal" -> "`x`, `y`"), caseSensitive = true) val tableAcceptAnySchema = TestRelationAcceptAnySchema(StructType(Seq( @@ -706,7 +706,7 @@ abstract class V2WriteAnalysisSuiteBase extends AnalysisTest { parsedPlan2, "UNRESOLVED_COLUMN", "WITH_SUGGESTION", - Array("`a`", "`x`, `y`"), + Map("objectName" -> "`a`", "proposal" -> "`x`, `y`"), caseSensitive = true) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala index faa8e6fb0bfed..781c1c20783cb 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala +++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TimeWindowSuite.scala @@ -21,7 +21,7 @@ import scala.reflect.ClassTag import org.scalatest.PrivateMethodTester -import org.apache.spark.SparkFunSuite +import org.apache.spark.{SparkException, SparkFunSuite} import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.util.DateTimeConstants._ import org.apache.spark.sql.internal.SQLConf @@ -30,7 +30,7 @@ import org.apache.spark.sql.types.{LongType, StructField, StructType, TimestampN class TimeWindowSuite extends SparkFunSuite with ExpressionEvalHelper with PrivateMethodTester { test("time window is unevaluable") { - intercept[UnsupportedOperationException] { + intercept[SparkException] { evaluateWithoutCodegen(TimeWindow(Literal(10L), "1 second", "1 second", "0 second")) } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala index 52d0c6c701820..b99fd089f0ae4 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala @@ -215,10 +215,10 @@ class ErrorParserSuite extends AnalysisTest { // special handling on char and varchar intercept("SELECT cast('a' as CHAR)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19, - "DataType char requires a length parameter") + "DataType \"CHAR\" requires a length parameter") intercept("SELECT cast('a' as Varchar)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19, - "DataType varchar requires a length parameter") + "DataType \"VARCHAR\" requires a length parameter") intercept("SELECT cast('a' as Character)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19, - "DataType character requires a length parameter") + "DataType \"CHARACTER\" requires a length parameter") } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala index d9fc877c321cd..10b763b1b5134 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala @@ -497,8 +497,8 @@ object QueryExecution { case e @ (_: java.lang.NullPointerException | _: java.lang.AssertionError) => new SparkException( errorClass = "INTERNAL_ERROR", - messageParameters = Array(msg + - " Please, fill a bug report in, and provide the full stack trace."), + messageParameters = Map("message" -> (msg + + " Please, fill a bug report in, and provide the full stack trace.")), cause = e) case e: Throwable => e diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out index fa55ace4d2e5f..2aee862ad2272 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out @@ -157,7 +157,7 @@ org.apache.spark.SparkArithmeticException "messageParameters" : { "message" : "long overflow", "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.", - "config" : "spark.sql.ansi.enabled" + "config" : "\"spark.sql.ansi.enabled\"" }, "queryContext" : [ { "objectType" : "", @@ -365,7 +365,7 @@ org.apache.spark.SparkArithmeticException "messageParameters" : { "message" : "long overflow", "alternative" : " Use 'try_add' to 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out
index fa55ace4d2e5f..2aee862ad2272 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_aggregates.sql.out
@@ -157,7 +157,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -365,7 +365,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
index 914ee064c5127..3f317a0b982f1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
@@ -51,7 +51,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -75,7 +75,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -266,7 +266,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -290,7 +290,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -424,7 +424,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -448,7 +448,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -566,7 +566,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -590,7 +590,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
index 5b9edcf763fd4..7b2382a4f5710 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
@@ -203,7 +203,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -238,7 +238,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -273,7 +273,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -309,7 +309,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -345,7 +345,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_subtract' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -381,7 +381,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "integer overflow",
     "alternative" : " Use 'try_subtract' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
  },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index e4056ac2e6c8c..39a8a2441afee 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -395,7 +395,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -910,7 +910,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -950,7 +950,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -990,7 +990,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_multiply' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 50027cb4a902d..7289de2c0c57b 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -228,7 +228,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -251,7 +251,7 @@ org.apache.spark.SparkArithmeticException
   "messageParameters" : {
     "message" : "long overflow",
     "alternative" : " Use 'try_add' to tolerate overflow and return NULL instead.",
-    "config" : "spark.sql.ansi.enabled"
+    "config" : "\"spark.sql.ansi.enabled\""
   },
   "queryContext" : [ {
     "objectType" : "",
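Every .sql.out hunk above makes the same mechanical change: the SQL config name carried in the "config" parameter is now rendered wrapped in double quotes ("spark.sql.ansi.enabled"). A rough sketch of the kind of quoting this implies; the helper name here is illustrative, not necessarily the one the patch uses:

    // Hypothetical helper: wraps a config key in double quotes,
    // which is what the regenerated golden files now expect.
    def quoteConf(conf: String): String = "\"" + conf + "\""

    assert(quoteConf("spark.sql.ansi.enabled") == "\"spark.sql.ansi.enabled\"")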
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 9dca09fa2e9d7..e74983bf5446c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -2784,11 +2784,13 @@ class DataFrameSuite extends QueryTest
     implicit val valueEncoder = RowEncoder(df.schema)
-    val err = intercept[AnalysisException] {
-      df.groupBy($"d", $"b").as[GroupByKey, Row]
-    }
-    assert(err.getErrorClass == "UNRESOLVED_COLUMN")
-    assert(err.messageParameters.head == "`d`")
+    checkError(
+      exception = intercept[AnalysisException] {
+        df.groupBy($"d", $"b").as[GroupByKey, Row]
+      },
+      errorClass = "UNRESOLVED_COLUMN",
+      errorSubClass = Some("WITH_SUGGESTION"),
+      parameters = Map("objectName" -> "`d`", "proposal" -> "`a`, `b`, `c`"))
   }
   test("emptyDataFrame should be foldable") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 3a77b12a1ecfa..11f4fe0649be4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -122,7 +122,9 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
         s"DESCRIBE $t invalid_col",
         "UNRESOLVED_COLUMN",
         "WITH_SUGGESTION",
-        Array("`invalid_col`", "`testcat`.`tbl`.`id`, `testcat`.`tbl`.`data`"))
+        Map(
+          "objectName" -> "`invalid_col`",
+          "proposal" -> "`testcat`.`tbl`.`id`, `testcat`.`tbl`.`data`"))
     }
   }
@@ -997,8 +999,9 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
         s"SELECT ns1.ns2.ns3.tbl.id from $t",
         "UNRESOLVED_COLUMN",
         "WITH_SUGGESTION",
-        Array("`ns1`.`ns2`.`ns3`.`tbl`.`id`",
-          "`testcat`.`ns1`.`ns2`.`tbl`.`id`, `testcat`.`ns1`.`ns2`.`tbl`.`point`"))
+        Map(
+          "objectName" -> "`ns1`.`ns2`.`ns3`.`tbl`.`id`",
+          "proposal" -> "`testcat`.`ns1`.`ns2`.`tbl`.`id`, `testcat`.`ns1`.`ns2`.`tbl`.`point`"))
     }
   }
@@ -1577,19 +1580,19 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
       s"UPDATE $t SET dummy='abc'",
       "UNRESOLVED_COLUMN",
       "WITH_SUGGESTION",
-      Array(
-        "`dummy`",
-        "`testcat`.`ns1`.`ns2`.`tbl`.`p`, `testcat`.`ns1`.`ns2`.`tbl`.`id`, " +
-          "`testcat`.`ns1`.`ns2`.`tbl`.`age`, `testcat`.`ns1`.`ns2`.`tbl`.`name`"))
+      Map(
+        "objectName" -> "`dummy`",
+        "proposal" -> ("`testcat`.`ns1`.`ns2`.`tbl`.`p`, `testcat`.`ns1`.`ns2`.`tbl`.`id`, " +
+          "`testcat`.`ns1`.`ns2`.`tbl`.`age`, `testcat`.`ns1`.`ns2`.`tbl`.`name`")))
     assertAnalysisErrorClass(
       s"UPDATE $t SET name='abc' WHERE dummy=1",
       "UNRESOLVED_COLUMN",
       "WITH_SUGGESTION",
-      Array(
-        "`dummy`",
-        "`testcat`.`ns1`.`ns2`.`tbl`.`p`, " +
+      Map(
+        "objectName" -> "`dummy`",
+        "proposal" -> ("`testcat`.`ns1`.`ns2`.`tbl`.`p`, " +
          "`testcat`.`ns1`.`ns2`.`tbl`.`id`, " +
-          "`testcat`.`ns1`.`ns2`.`tbl`.`age`, `testcat`.`ns1`.`ns2`.`tbl`.`name`"))
+          "`testcat`.`ns1`.`ns2`.`tbl`.`age`, `testcat`.`ns1`.`ns2`.`tbl`.`name`")))
     // UPDATE is not implemented yet.
     val e = intercept[UnsupportedOperationException] {
@@ -2428,7 +2431,7 @@ class DataSourceV2SQLSuiteV1Filter extends DataSourceV2SQLSuite with AlterTableT
       sqlStatement: String,
       expectedErrorClass: String,
       expectedErrorSubClass: String,
-      expectedErrorMessageParameters: Array[String]): Unit = {
+      expectedErrorMessageParameters: Map[String, String]): Unit = {
     val ex = intercept[AnalysisException] {
       sql(sqlStatement)
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index f07451b4dd560..2897ef6c39a8c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -413,7 +413,7 @@ class QueryCompilationErrorsSuite
       errorClass = "UNRESOLVED_MAP_KEY",
       errorSubClass = "WITH_SUGGESTION",
       sqlState = None,
-      parameters = Map("columnName" -> "`a`",
+      parameters = Map("objectName" -> "`a`",
         "proposal" -> "`__auto_generated_subquery_name`.`m`, `__auto_generated_subquery_name`.`aa`"),
       context = ExpectedContext(
@@ -574,7 +574,8 @@ class QueryCompilationErrorsSuite
       exception = e1,
       errorClass = "UNSUPPORTED_DESERIALIZER",
       errorSubClass = Some("FIELD_NUMBER_MISMATCH"),
-      parameters = Map("schema" -> "\"STRUCT\"",
+      parameters = Map(
+        "schema" -> "\"STRUCT\"",
         "ordinal" -> "3"))
     val e2 = intercept[AnalysisException] {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index e55ec53f927cc..0430d314a1ba3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -650,7 +650,7 @@ class QueryExecutionErrorsSuite
       parameters = Map(
         "message" -> "integer overflow",
         "alternative" -> "",
-        "config" -> SQLConf.ANSI_ENABLED.key))
+        "config" -> s""""${SQLConf.ANSI_ENABLED.key}""""))
   }
   test("CAST_OVERFLOW: from long to ANSI intervals") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index be438a37a6b70..cd46cc3b659bb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -183,7 +183,8 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       exception = parseException("SELECT * FROM db.func()"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "table valued function cannot specify database name "),
+      parameters = Map(
+        "inputString" -> "table valued function cannot specify database name: `db`.`func`"),
       context = ExpectedContext(
         fragment = "db.func()",
         start = 14,
@@ -193,7 +194,8 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       exception = parseException("SELECT * FROM ns.db.func()"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "table valued function cannot specify database name "),
+      parameters = Map(
+        "inputString" -> "table valued function cannot specify database name: `ns`.`db`.`func`"),
       context = ExpectedContext(
         fragment = "ns.db.func()",
         start = 14,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index 5c97821f11ecd..1852e13181674 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType
 import org.apache.hadoop.io.compress.GzipCodec
 import org.apache.logging.log4j.Level
-import org.apache.spark.{SparkConf, SparkException, SparkUpgradeException, TestUtils}
+import org.apache.spark.{SparkConf, SparkException, SparkIllegalArgumentException, SparkUpgradeException, TestUtils}
 import org.apache.spark.sql.{AnalysisException, Column, DataFrame, Encoders, QueryTest, Row}
 import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils}
 import org.apache.spark.sql.execution.datasources.CommonFileDataSourceSuite
@@ -2653,11 +2653,13 @@ abstract class CSVSuite
         .option("header", true)
         .csv(path.getCanonicalPath)
       checkAnswer(readback, Seq(Row(2, 3), Row(0, 1)))
-      val ex = intercept[AnalysisException] {
-        readback.filter($"AAA" === 2 && $"bbb" === 3).collect()
-      }
-      assert(ex.getErrorClass == "UNRESOLVED_COLUMN")
-      assert(ex.messageParameters.head == "`AAA`")
+      checkError(
+        exception = intercept[AnalysisException] {
+          readback.filter($"AAA" === 2 && $"bbb" === 3).collect()
+        },
+        errorClass = "UNRESOLVED_COLUMN",
+        errorSubClass = Some("WITH_SUGGESTION"),
+        parameters = Map("objectName" -> "`AAA`", "proposal" -> "`BBB`, `aaa`"))
     }
   }
 }
@@ -2805,13 +2807,11 @@ abstract class CSVSuite
       // Error should be thrown when attempting to prefersDate with Legacy parser
       if (SQLConf.get.legacyTimeParserPolicy == LegacyBehaviorPolicy.LEGACY) {
-        val msg = intercept[IllegalArgumentException] {
-          spark.read
-            .format("csv")
-            .options(options1)
-            .load(testFile(dateInferSchemaFile))
-        }.getMessage
-        assert(msg.contains("CANNOT_INFER_DATE"))
+        checkError(
+          exception = intercept[SparkIllegalArgumentException] {
+            spark.read.format("csv").options(options1).load(testFile(dateInferSchemaFile))
+          },
+          errorClass = "CANNOT_INFER_DATE")
       } else {
         // 1. Specify date format and timestamp format
         // 2. Date inference should work with default date format when dateFormat is not provided
@@ -2859,10 +2859,11 @@ abstract class CSVSuite
         .csv(path.getAbsolutePath)
       if (SQLConf.get.legacyTimeParserPolicy == LegacyBehaviorPolicy.LEGACY) {
-        val msg = intercept[IllegalArgumentException] {
-          output.collect()
-        }.getMessage
-        assert(msg.contains("CANNOT_INFER_DATE"))
+        checkError(
+          exception = intercept[SparkIllegalArgumentException] {
+            output.collect()
+          },
+          errorClass = "CANNOT_INFER_DATE")
       } else {
         checkAnswer(
           output,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index f0801ae313e8c..a9e1d3a751ed5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -3041,11 +3041,13 @@ abstract class JsonSuite
       val readback = spark.read.schema("aaa integer, BBB integer")
         .json(path.getCanonicalPath)
       checkAnswer(readback, Seq(Row(null, null), Row(0, 1)))
-      val ex = intercept[AnalysisException] {
-        readback.filter($"AAA" === 0 && $"bbb" === 1).collect()
-      }
-      assert(ex.getErrorClass == "UNRESOLVED_COLUMN")
-      assert(ex.messageParameters.head == "`AAA`")
+      checkError(
+        exception = intercept[AnalysisException] {
+          readback.filter($"AAA" === 0 && $"bbb" === 1).collect()
+        },
+        errorClass = "UNRESOLVED_COLUMN",
+        errorSubClass = Some("WITH_SUGGESTION"),
+        parameters = Map("objectName" -> "`AAA`", "proposal" -> "`BBB`, `aaa`"))
       // Schema inferring
       val readback2 = spark.read.json(path.getCanonicalPath)
       checkAnswer(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index fa3241fe59bf3..b93bef4f28341 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -2092,19 +2092,30 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
   test("SPARK-33294: Add query resolved check before analyze InsertIntoDir") {
     withTempPath { path =>
-      val ex = intercept[AnalysisException] {
-        sql(
-          s"""
-            |INSERT OVERWRITE DIRECTORY '${path.getAbsolutePath}' USING PARQUET
-            |SELECT * FROM (
-            |  SELECT c3 FROM (
-            |    SELECT c1, c2 from values(1,2) t(c1, c2)
-            |  )
-            |)
-          """.stripMargin)
-      }
-      assert(ex.getErrorClass == "UNRESOLVED_COLUMN")
-      assert(ex.messageParameters.head == "`c3`")
+      val insert = s"INSERT OVERWRITE DIRECTORY '${path.getAbsolutePath}' USING PARQUET"
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(
+            s"""
+              |$insert
+              |SELECT * FROM (
+              |  SELECT c3 FROM (
+              |    SELECT c1, c2 from values(1,2) t(c1, c2)
+              |  )
+              |)
+            """.stripMargin)
+        },
+        errorClass = "UNRESOLVED_COLUMN",
+        errorSubClass = "WITH_SUGGESTION",
+        sqlState = "42000",
+        parameters = Map(
+          "objectName" -> "`c3`",
+          "proposal" ->
+            "`__auto_generated_subquery_name`.`c1`, `__auto_generated_subquery_name`.`c2`"),
+        context = ExpectedContext(
+          fragment = insert,
+          start = 1,
+          stop = insert.length))
     }
   }
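The test-suite hunks above all converge on one assertion style: instead of asserting on getErrorClass and the head of a positional parameter array, they intercept the exception and hand it to checkError together with the error class, optional subclass, and the named parameter map. A condensed sketch of that pattern, assuming a suite that already mixes in the shared checkError helper; the query, table, and column names below are placeholders:

    // Condensed illustration of the assertion style used throughout these suites.
    checkError(
      exception = intercept[AnalysisException] {
        sql("SELECT missing_col FROM t")  // placeholder query and table name
      },
      errorClass = "UNRESOLVED_COLUMN",
      errorSubClass = Some("WITH_SUGGESTION"),
      parameters = Map("objectName" -> "`missing_col`", "proposal" -> "`a`, `b`"))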