Commit f081650

junyuc25 and MaxGekk authored and committed
[SPARK-42307][SQL] Assign name for error _LEGACY_ERROR_TEMP_2232
### What changes were proposed in this pull request?
In this PR, I propose to replace the legacy error name `_LEGACY_ERROR_TEMP_2232` with `ROW_VALUE_IS_NULL`, and add a test case for it.

### Why are the changes needed?
A proper name improves the user experience with Spark SQL.

### Does this PR introduce _any_ user-facing change?
Yes.

### How was this patch tested?
Ran all the tests in the suite:
```
build/sbt "testOnly *org.apache.spark.sql.RowSuite"
```

### Was this patch authored or co-authored using generative AI tooling?
No

Closes #47354 from junyuc25/SPARK-42307.

Lead-authored-by: junyuc25 <[email protected]>
Co-authored-by: junyuc25 <=>
Signed-off-by: Max Gekk <[email protected]>
1 parent 3f3d024 commit f081650
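For context, here is a minimal sketch of the user-facing change described above, assuming a Spark build that contains this commit (the example object name is hypothetical, not part of the patch): a primitive getter such as `Row.getLong` on a NULL field now raises `SparkRuntimeException` with the error class `ROW_VALUE_IS_NULL`, instead of a `SparkException` carrying `_LEGACY_ERROR_TEMP_2232`.

```scala
import org.apache.spark.SparkRuntimeException
import org.apache.spark.sql.Row

// Hypothetical illustration object; not part of the patch itself.
object RowValueIsNullExample {
  def main(args: Array[String]): Unit = {
    // A single-column row whose only value is NULL.
    val rowWithNullValue = Row.fromSeq(Seq(null))
    try {
      // Primitive getters cannot represent NULL, so this call must fail.
      rowWithNullValue.getLong(0)
    } catch {
      case e: SparkRuntimeException =>
        // The message is rendered from the ROW_VALUE_IS_NULL template added
        // in error-conditions.json, with <index> bound to 0.
        println(e.getMessage)
    }
  }
}
```

Like the `NullPointerException` documented before this change, `SparkRuntimeException` is unchecked, so existing call sites compile unchanged; tests that intercepted `SparkException` are updated in the diff below.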

5 files changed (+34, -19 lines)


common/utils/src/main/resources/error/error-conditions.json

Lines changed: 6 additions & 5 deletions
@@ -3772,6 +3772,12 @@
     ],
     "sqlState" : "21000"
   },
+  "ROW_VALUE_IS_NULL" : {
+    "message" : [
+      "Found NULL in a row at the index <index>, expected a non-NULL value."
+    ],
+    "sqlState" : "22023"
+  },
   "RULE_ID_NOT_FOUND" : {
     "message" : [
       "Not found an id for the rule name \"<ruleName>\". Please modify RuleIdCollection.scala if you are adding a new rule."
@@ -7285,11 +7291,6 @@
       "Primitive types are not supported."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2232" : {
-    "message" : [
-      "Value at index <index> is null."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2233" : {
     "message" : [
       "Only Data Sources providing FileFormat are supported: <providingClass>."

sql/api/src/main/scala/org/apache/spark/sql/Row.scala

Lines changed: 8 additions & 8 deletions
@@ -219,39 +219,39 @@ trait Row extends Serializable {
    * Returns the value at position i as a primitive boolean.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getBoolean(i: Int): Boolean = getAnyValAs[Boolean](i)
 
   /**
    * Returns the value at position i as a primitive byte.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getByte(i: Int): Byte = getAnyValAs[Byte](i)
 
   /**
    * Returns the value at position i as a primitive short.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getShort(i: Int): Short = getAnyValAs[Short](i)
 
   /**
    * Returns the value at position i as a primitive int.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getInt(i: Int): Int = getAnyValAs[Int](i)
 
   /**
    * Returns the value at position i as a primitive long.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getLong(i: Int): Long = getAnyValAs[Long](i)
 
@@ -260,15 +260,15 @@ trait Row extends Serializable {
    * Throws an exception if the type mismatches or if the value is null.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getFloat(i: Int): Float = getAnyValAs[Float](i)
 
   /**
    * Returns the value at position i as a primitive double.
    *
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   def getDouble(i: Int): Double = getAnyValAs[Double](i)
 
@@ -530,7 +530,7 @@ trait Row extends Serializable {
    *
    * @throws UnsupportedOperationException when schema is not defined.
    * @throws ClassCastException when data type does not match.
-   * @throws NullPointerException when value is null.
+   * @throws org.apache.spark.SparkRuntimeException when value is null.
    */
   private def getAnyValAs[T <: AnyVal](i: Int): T =
     if (isNullAt(i)) throw DataTypeErrors.valueIsNullError(i)

sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala

Lines changed: 2 additions & 2 deletions
@@ -272,8 +272,8 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
   }
 
   def valueIsNullError(index: Int): Throwable = {
-    new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2232",
+    new SparkRuntimeException(
+      errorClass = "ROW_VALUE_IS_NULL",
       messageParameters = Map(
         "index" -> index.toString),
       cause = null)

sql/catalyst/src/test/scala/org/apache/spark/sql/RowTest.scala

Lines changed: 4 additions & 3 deletions
@@ -24,7 +24,7 @@ import org.scalatest.funspec.AnyFunSpec
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
 
-import org.apache.spark.{SparkException, SparkIllegalArgumentException, SparkUnsupportedOperationException}
+import org.apache.spark.{SparkIllegalArgumentException, SparkRuntimeException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
 import org.apache.spark.sql.types._
@@ -87,8 +87,9 @@ class RowTest extends AnyFunSpec with Matchers {
     sampleRowWithoutCol3.getValuesMap[String](List("col1", "col2")) shouldBe expected
   }
 
-  it("getAs() on type extending AnyVal throws an exception when accessing field that is null") {
-    intercept[SparkException] {
+  it("getAnyValAs() on type extending AnyVal throws an exception when accessing " +
+    "field that is null") {
+    intercept[SparkRuntimeException] {
       sampleRowWithoutCol3.getInt(sampleRowWithoutCol3.fieldIndex("col3"))
     }
   }

sql/core/src/test/scala/org/apache/spark/sql/RowSuite.scala

Lines changed: 14 additions & 1 deletion
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
+import org.apache.spark.{SparkFunSuite, SparkRuntimeException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, SpecificInternalRow}
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
@@ -123,4 +123,17 @@ class RowSuite extends SparkFunSuite with SharedSparkSession {
       parameters = Map("methodName" -> "fieldIndex", "className" -> "Row", "fieldName" -> "`foo`")
     )
   }
+
+  test("SPARK-42307: get a value from a null column should result in error") {
+    val position = 0
+    val rowWithNullValue = Row.fromSeq(Seq(null))
+
+    checkError(
+      exception = intercept[SparkRuntimeException] {
+        rowWithNullValue.getLong(position)
+      },
+      errorClass = "ROW_VALUE_IS_NULL",
+      parameters = Map("index" -> position.toString)
+    )
+  }
 }
