Skip to content

Commit 8dfbf7a

Browse files
committed
more tests for complex data type
1 parent ddab984 commit 8dfbf7a

File tree

1 file changed

+33
-6
lines changed

1 file changed

+33
-6
lines changed

sql/core/src/test/scala/org/apache/spark/sql/sources/NewTableScanSuite.scala

Lines changed: 33 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import org.apache.spark.sql._
2121
import java.sql.{Timestamp, Date}
2222
import org.apache.spark.sql.execution.RDDConversions
2323

24-
case class PrimaryData(
24+
case class AllDataTypesData(
2525
stringField: String,
2626
intField: Int,
2727
longField: Long,
@@ -32,7 +32,10 @@ case class PrimaryData(
3232
booleanField: Boolean,
3333
decimalField: BigDecimal,
3434
date: Date,
35-
timestampField: Timestamp)
35+
timestampField: Timestamp,
36+
arrayField: Seq[Int],
37+
mapField: Map[Int, String],
38+
structField: Row)
3639

3740
class AllDataTypesScanSource extends SchemaRelationProvider {
3841
override def createRelation(
@@ -53,7 +56,7 @@ case class AllDataTypesScan(
5356

5457
override def buildScan() = {
5558
val rdd = sqlContext.sparkContext.parallelize(from to to).map { i =>
56-
PrimaryData(
59+
AllDataTypesData(
5760
i.toString,
5861
i,
5962
i.toLong,
@@ -64,7 +67,10 @@ case class AllDataTypesScan(
6467
true,
6568
BigDecimal(i),
6669
new Date(12345),
67-
new Timestamp(12345))
70+
new Timestamp(12345),
71+
Seq(i, i+1),
72+
Map(i -> i.toString),
73+
Row(i, i.toString))
6874
}
6975

7076
RDDConversions.productToRowRdd(rdd, schema)
@@ -87,15 +93,20 @@ class NewTableScanSuite extends DataSourceTest {
8793
true,
8894
BigDecimal(i),
8995
new Date(12345),
90-
new Timestamp(12345))
96+
new Timestamp(12345),
97+
Seq(i, i+1),
98+
Map(i -> i.toString),
99+
Row(i, i.toString))
91100
}.toSeq
92101

93102
before {
94103
sql(
95104
"""
96105
|CREATE TEMPORARY TABLE oneToTen(stringField stRIng, intField iNt, longField Bigint,
97106
|floatField flOat, doubleField doubLE, shortField smaLlint, byteField tinyint,
98-
|booleanField boolean, decimalField decimal(10,2), dateField dAte, timestampField tiMestamp)
107+
|booleanField boolean, decimalField decimal(10,2), dateField dAte,
108+
|timestampField tiMestamp, arrayField Array<inT>, mapField MAP<iNt, StRing>,
109+
|structField StRuct<key:INt, value:STrINg>)
99110
|USING org.apache.spark.sql.sources.AllDataTypesScanSource
100111
|OPTIONS (
101112
| From '1',
@@ -108,6 +119,10 @@ class NewTableScanSuite extends DataSourceTest {
108119
"SELECT * FROM oneToTen",
109120
records)
110121

122+
sqlTest(
123+
"SELECT count(*) FROM oneToTen",
124+
10)
125+
111126
sqlTest(
112127
"SELECT stringField FROM oneToTen",
113128
(1 to 10).map(i =>Row(i.toString)).toSeq)
@@ -133,4 +148,16 @@ class NewTableScanSuite extends DataSourceTest {
133148
"SELECT distinct(a.timestampField) FROM oneToTen a",
134149
Some(new Timestamp(12345)).map(Row(_)).toSeq)
135150

151+
sqlTest(
152+
"SELECT distinct(arrayField) FROM oneToTen a where intField=1",
153+
Some(Seq(1, 2)).map(Row(_)).toSeq)
154+
155+
sqlTest(
156+
"SELECT distinct(mapField) FROM oneToTen a where intField=1",
157+
Some(Map(1 -> 1.toString)).map(Row(_)).toSeq)
158+
159+
sqlTest(
160+
"SELECT distinct(structField) FROM oneToTen a where intField=1",
161+
Some(Row(1, "1")).map(Row(_)).toSeq)
162+
136163
}

0 commit comments

Comments (0)