6 changes: 0 additions & 6 deletions core/src/main/resources/error/error-classes.json
@@ -239,12 +239,6 @@
       }
     }
   },
-  "INDEX_OUT_OF_BOUNDS" : {
-    "message" : [
-      "Index <indexValue> must be between 0 and the length of the ArrayData."
-    ],
-    "sqlState" : "22023"
-  },
   "INTERNAL_ERROR" : {
     "message" : [
       "<message>"
16 changes: 0 additions & 16 deletions core/src/main/scala/org/apache/spark/SparkException.scala
@@ -316,22 +316,6 @@ private[spark] class SparkIllegalArgumentException(
   override def getQueryContext: Array[QueryContext] = context
 }
 
-/**
- * Index out of bounds exception thrown from Spark with an error class.
- */
-private[spark] class SparkIndexOutOfBoundsException(
-    errorClass: String,
-    errorSubClass: Option[String] = None,
-    messageParameters: Array[String])
-  extends IndexOutOfBoundsException(
-    SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters))
-  with SparkThrowable {
-
-  override def getMessageParameters: Array[String] = messageParameters
-  override def getErrorClass: String = errorClass
-  override def getErrorSubClass: String = errorSubClass.orNull
-}
-
 /**
  * IO exception thrown from Spark with an error class.
  */
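Note: the removed SparkIndexOutOfBoundsException extended java.lang.IndexOutOfBoundsException, so callers that matched on that JVM type stop catching this failure once the code throws SparkException.internalError instead. A minimal, hedged sketch of the caller-side difference (the `classify` helper and its `seq` parameter are illustrative, not part of this PR):

```scala
import org.apache.spark.SparkException

// `seq` stands in for an ArrayDataIndexedSeq-backed IndexedSeq.
def classify(seq: IndexedSeq[Any]): String =
  try {
    seq(-1).toString
  } catch {
    // Matched before this PR, because SparkIndexOutOfBoundsException extended it.
    case _: IndexOutOfBoundsException => "index-out-of-bounds"
    // Matched after this PR: a SparkException carrying the INTERNAL_ERROR error class.
    case e: SparkException => e.getErrorClass
  }
```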
@@ -19,6 +19,7 @@ package org.apache.spark.sql.catalyst.util
 
 import scala.reflect.ClassTag
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{SpecializedGetters, UnsafeArrayData}
 import org.apache.spark.sql.errors.QueryExecutionErrors
@@ -200,7 +201,8 @@ class ArrayDataIndexedSeq[T](arrayData: ArrayData, dataType: DataType) extends I
     if (0 <= idx && idx < arrayData.numElements()) {
       accessor(arrayData, idx).asInstanceOf[T]
     } else {
-      throw QueryExecutionErrors.indexOutOfBoundsOfArrayDataError(idx)
+      throw SparkException.internalError(
+        s"Index $idx must be between 0 and the length of the ArrayData.")
     }
 
   override def length: Int = arrayData.numElements()
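For an end-to-end view of the new behavior, here is a hedged, spark-shell style sketch; constructing the sequence via ArrayData.toArrayData and the exact rendered message prefix are assumptions, not shown in this diff:

```scala
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.util.{ArrayData, ArrayDataIndexedSeq}
import org.apache.spark.sql.types.IntegerType

// A three-element ArrayData-backed IndexedSeq (construction assumed from the class signature above).
val seq = new ArrayDataIndexedSeq[Int](ArrayData.toArrayData(Array(1, 2, 3)), IntegerType)

try {
  seq(3) // one past the last valid index
} catch {
  case e: SparkException =>
    // The internal-error path carries the old message text as the <message> parameter.
    assert(e.getErrorClass == "INTERNAL_ERROR")
    assert(e.getMessage.contains("Index 3 must be between 0 and the length of the ArrayData."))
}
```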
@@ -1372,11 +1372,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       """.stripMargin.replaceAll("\n", " "))
   }
 
-  def indexOutOfBoundsOfArrayDataError(idx: Int): Throwable = {
-    new SparkIndexOutOfBoundsException(
-      errorClass = "INDEX_OUT_OF_BOUNDS", None, Array(toSQLValue(idx, IntegerType)))
-  }
-
   def malformedRecordsDetectedInRecordParsingError(e: BadRecordException): Throwable = {
     new SparkException("Malformed records are detected in record parsing. " +
       s"Parse Mode: ${FailFastMode.name}. To process malformed records as null " +
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.util
 
 import scala.util.Random
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkException, SparkFunSuite}
 import org.apache.spark.sql.RandomDataGenerator
 import org.apache.spark.sql.catalyst.encoders.{ExamplePointUDT, RowEncoder}
 import org.apache.spark.sql.catalyst.expressions.{SafeProjection, UnsafeProjection}
@@ -53,13 +53,15 @@ class ArrayDataIndexedSeqSuite extends SparkFunSuite {
       }
     }
 
-    intercept[IndexOutOfBoundsException] {
-      seq(-1)
-    }.getMessage().contains("must be between 0 and the length of the ArrayData.")
-
-    intercept[IndexOutOfBoundsException] {
-      seq(seq.length)
-    }.getMessage().contains("must be between 0 and the length of the ArrayData.")
+    Seq(-1, seq.length).foreach { index =>
+      checkError(
+        exception = intercept[SparkException] {
+          seq(index)
+        },
+        errorClass = "INTERNAL_ERROR",
+        parameters = Map(
+          "message" -> s"Index $index must be between 0 and the length of the ArrayData."))
+    }
   }
 
   private def testArrayData(): Unit = {