Commit 9bf12f8
adding test case
1 parent 7787ec7

4 files changed: +138 additions, -17 deletions

sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala

Lines changed: 0 additions & 15 deletions

@@ -49,21 +49,6 @@ private[sql] class DDLParser extends StandardTokenParsers with PackratParsers wi
   protected implicit def asParser(k: Keyword): Parser[String] =
     lexical.allCaseVersions(k.str).map(x => x : Parser[String]).reduce(_ | _)
 
-  // data types
-  protected val STRING = Keyword("STRING")
-  protected val DOUBLE = Keyword("DOUBLE")
-  protected val BOOLEAN = Keyword("BOOLEAN")
-  protected val FLOAT = Keyword("FLOAT")
-  protected val INT = Keyword("INT")
-  protected val TINYINT = Keyword("TINYINT")
-  protected val SMALLINT = Keyword("SMALLINT")
-  protected val BIGINT = Keyword("BIGINT")
-  protected val BINARY = Keyword("BINARY")
-  protected val DECIMAL = Keyword("DECIMAL")
-  protected val DATE = Keyword("DATE")
-  protected val TIMESTAMP = Keyword("TIMESTAMP")
-  protected val VARCHAR = Keyword("VARCHAR")
-
   protected val CREATE = Keyword("CREATE")
   protected val TEMPORARY = Keyword("TEMPORARY")
   protected val TABLE = Keyword("TABLE")
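
Context for the two lines this hunk keeps: asParser turns each Keyword into a case-insensitive parser by enumerating every case spelling of the keyword and folding the alternatives together with `|`. Below is a minimal, self-contained sketch of that pattern; the allCaseVersions here is an assumption about what SqlLexical's helper does, not a quote of it.

import scala.util.parsing.combinator.syntactical.StandardTokenParsers

// Sketch only: enumerate every upper/lower-case spelling of a keyword and
// fold the alternatives into a single Parser, mirroring asParser above.
object CaseInsensitiveKeywords extends StandardTokenParsers {
  def allCaseVersions(s: String, prefix: String = ""): Stream[String] =
    if (s.isEmpty) Stream(prefix)
    else allCaseVersions(s.tail, prefix + s.head.toLower) ++
      allCaseVersions(s.tail, prefix + s.head.toUpper)

  // Every spelling must be reserved so the lexer tokenizes it as a keyword.
  lexical.reserved ++= allCaseVersions("table")

  val TABLE: Parser[String] = allCaseVersions("table").map(keyword).reduce(_ | _)

  def parseTable(input: String) = phrase(TABLE)(new lexical.Scanner(input))
}

Under this sketch, parseTable("TABLE"), parseTable("table"), and parseTable("TaBlE") all succeed, while any non-keyword input fails.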

sql/core/src/main/scala/org/apache/spark/sql/sources/interfaces.scala

Lines changed: 1 addition & 1 deletion

@@ -18,7 +18,7 @@ package org.apache.spark.sql.sources
 
 import org.apache.spark.annotation.{Experimental, DeveloperApi}
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{SQLConf, Row, SQLContext, StructType}
+import org.apache.spark.sql.{Row, SQLContext, StructType}
 import org.apache.spark.sql.catalyst.expressions.{Expression, Attribute}
 
 /**

sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala

Lines changed: 1 addition & 1 deletion

@@ -70,7 +70,7 @@ class QueryTest extends PlanTest {
         """.stripMargin)
     }
 
-    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) {
+    if (prepareAnswer(convertedAnswer) != prepareAnswer(sparkAnswer)) { // issues here, sparkAnswer may be GenericRow[]
      fail(s"""
        |Results do not match for query:
        |${rdd.logicalPlan}
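
The new inline comment flags a genuine equality pitfall. Assuming the concern is that sparkAnswer may arrive as a Java array of GenericRow while convertedAnswer is a Seq, the != check can report a mismatch even when the contents agree, because arrays compare by reference in Scala. A small plain-Scala illustration (not the QueryTest code):

object ArrayEqualityPitfall extends App {
  val asArray: Array[Any] = Array("1", 1) // stand-in for a GenericRow[] answer
  val asSeq: Seq[Any] = Seq("1", 1)       // stand-in for the converted answer

  println(asArray == asSeq)       // false: an Array is never == a Seq
  println(asArray.toSeq == asSeq) // true: normalize to Seq before comparing
}

Normalizing both sides to the same collection type, as prepareAnswer presumably intends, avoids the false mismatch.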
Lines changed: 136 additions & 0 deletions

@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.sources
+
+import org.apache.spark.sql._
+import java.sql.{Timestamp, Date}
+import org.apache.spark.sql.execution.RDDConversions
+
+case class PrimaryData(
+    stringField: String,
+    intField: Int,
+    longField: Long,
+    floatField: Float,
+    doubleField: Double,
+    shortField: Short,
+    byteField: Byte,
+    booleanField: Boolean,
+    decimalField: BigDecimal,
+    date: Date,
+    timestampField: Timestamp)
+
+class AllDataTypesScanSource extends SchemaRelationProvider {
+  override def createRelation(
+      sqlContext: SQLContext,
+      parameters: Map[String, String],
+      schema: Option[StructType] = None): BaseRelation = {
+    AllDataTypesScan(parameters("from").toInt, parameters("TO").toInt, schema)(sqlContext)
+  }
+}
+
+case class AllDataTypesScan(
+    from: Int,
+    to: Int,
+    userSpecifiedSchema: Option[StructType])(@transient val sqlContext: SQLContext)
+  extends TableScan {
+
+  override def schema = userSpecifiedSchema.get
+
+  override def buildScan() = {
+    val rdd = sqlContext.sparkContext.parallelize(from to to).map { i =>
+      PrimaryData(
+        i.toString,
+        i,
+        i.toLong,
+        i.toFloat,
+        i.toDouble,
+        i.toShort,
+        i.toByte,
+        true,
+        BigDecimal(i),
+        new Date(12345),
+        new Timestamp(12345))
+    }
+
+    RDDConversions.productToRowRdd(rdd, schema)
+  }
+
+}
+
+class NewTableScanSuite extends DataSourceTest {
+  import caseInsensisitiveContext._
+
+  var records = (1 to 10).map { i =>
+    Row(
+      i.toString,
+      i,
+      i.toLong,
+      i.toFloat,
+      i.toDouble,
+      i.toShort,
+      i.toByte,
+      true,
+      BigDecimal(i),
+      new Date(12345),
+      new Timestamp(12345))
+  }.toSeq
+
+  before {
+    sql(
+      """
+        |CREATE TEMPORARY TABLE oneToTen(stringField string, intField int, longField bigint,
+        |floatField float, doubleField double, shortField smallint, byteField tinyint,
+        |booleanField boolean, decimalField decimal, dateField date, timestampField timestamp)
+        |USING org.apache.spark.sql.sources.AllDataTypesScanSource
+        |OPTIONS (
+        |  From '1',
+        |  To '10'
+        |)
+      """.stripMargin)
+  }
+
+  sqlTest(
+    "SELECT * FROM oneToTen",
+    records)
+
+  sqlTest(
+    "SELECT stringField FROM oneToTen",
+    (1 to 10).map(i => Row(i.toString)).toSeq)
+
+  sqlTest(
+    "SELECT intField FROM oneToTen WHERE intField < 5",
+    (1 to 4).map(Row(_)).toSeq)
+
+  sqlTest(
+    "SELECT longField * 2 FROM oneToTen",
+    (1 to 10).map(i => Row(i * 2.toLong)).toSeq)
+
+  sqlTest(
+    """SELECT a.floatField, b.floatField FROM oneToTen a JOIN oneToTen b
+      |ON a.floatField = b.floatField + 1""".stripMargin,
+    (2 to 10).map(i => Row(i.toFloat, i - 1.toFloat)).toSeq)
+
+  sqlTest(
+    "SELECT distinct(a.dateField) FROM oneToTen a",
+    Some(new Date(12345)).map(Row(_)).toSeq)
+
+  sqlTest(
+    "SELECT distinct(a.timestampField) FROM oneToTen a",
+    Some(new Timestamp(12345)).map(Row(_)).toSeq)
+}
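
The sqlTest helper itself is not in this diff; it is presumably inherited from the DataSourceTest base class. A minimal sketch of the pattern it likely follows is below, where test is ScalaTest's registration method and checkAnswer stands in for the QueryTest comparison patched in the earlier hunk:

// Sketch only, assuming the enclosing suite provides test, sql, and checkAnswer.
def sqlTest(sqlString: String, expectedAnswer: Seq[Row]): Unit =
  test(sqlString) {
    checkAnswer(sql(sqlString), expectedAnswer)
  }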
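
One detail the suite exercises deliberately: the DDL writes the options as From '1' and To '10', while createRelation reads parameters("from") and parameters("TO"). Both lookups can only succeed if the options map ignores key case, which is presumably why the suite imports caseInsensisitiveContext. A hypothetical sketch of such a map (not Spark's implementation):

object CaseInsensitiveOptionsDemo extends App {
  // Hypothetical: normalize keys to lower case on construction and on lookup,
  // so "From", "from", and "FROM" all address the same entry.
  class CaseInsensitiveOptions(raw: Map[String, String]) {
    private val normalized = raw.map { case (k, v) => k.toLowerCase -> v }
    def apply(key: String): String = normalized(key.toLowerCase)
  }

  val opts = new CaseInsensitiveOptions(Map("From" -> "1", "To" -> "10"))
  assert(opts("from") == "1" && opts("TO") == "10")
}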
