Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
// TODO: consider using Array[_] for ArrayType child to avoid
// boxing of primitives
val baseValue = value.asInstanceOf[Seq[_]]
val index = ordinal.asInstanceOf[Int]
val index = ordinal.asInstanceOf[Number].intValue()
if (index >= baseValue.size || index < 0) {
null
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst
import org.apache.spark.sql.types._


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,26 @@ import org.apache.spark.unsafe.types.UTF8String

class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper {

/**
 * Invokes the given test function once per integral data type.
 *
 * Each invocation supplies a conversion function that turns an Int into the
 * corresponding integral type (Byte, Short, Int, Long), so a single test body
 * can be exercised against every integral representation of an ordinal.
 *
 * @param testFunc a test function that accepts a conversion function to convert an integer
 *                 into another data type.
 */
private def testIntegralDataTypes(testFunc: (Int => Any) => Unit): Unit = {
  val conversions: Seq[Int => Any] = Seq(_.toByte, _.toShort, identity, _.toLong)
  conversions.foreach(testFunc)
}

test("GetArrayItem") {
  // Verify that GetArrayItem resolves the element regardless of which
  // integral type (Byte/Short/Int/Long) carries the ordinal.
  testIntegralDataTypes { toIntegral =>
    val array = Literal.create(Seq("a", "b"), ArrayType(StringType))
    val ordinal = Literal(toIntegral(1))
    checkEvaluation(GetArrayItem(array, ordinal), "b")
  }
}

test("CreateStruct") {
val row = InternalRow(1, 2, 3)
val c1 = 'a.int.at(0).as("a")
Expand Down