Commit a95043b

[SPARK-6428][SQL] Added explicit type for all public methods in sql/core
Also implemented equals/hashCode when they are missing. This is done in order to enable automatic public method type checking.

Author: Reynold Xin <[email protected]>

Closes apache#5104 from rxin/sql-hashcode-explicittype and squashes the following commits:

ffce6f3 [Reynold Xin] Code review feedback.
8b36733 [Reynold Xin] [SPARK-6428][SQL] Added explicit type for all public methods.
1 parent 257cde7 commit a95043b
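
Why explicit types matter here: with Scala's type inference, a public method's signature is whatever the compiler deduces from the body, so an unrelated refactor can silently change the source and binary API. Pinning the type makes the contract explicit and lets tooling check signatures mechanically. A minimal sketch (illustrative names, not Spark code):

// The failure mode the patch guards against: the inferred result type is
// part of the public API, and it changes when only the method body changes.
class VersionA {
  def ids = Seq(1, 2, 3)             // inferred: Seq[Int]
}
class VersionB {
  def ids = List(1, 2, 3)            // inferred: List[Int] -- a silent API change
}
class Pinned {
  def ids: Seq[Int] = List(1, 2, 3)  // explicit annotation keeps the contract stable
}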


53 files changed (+438, -330 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/AttributeSet.scala

Lines changed: 1 addition & 2 deletions
@@ -17,7 +17,6 @@

 package org.apache.spark.sql.catalyst.expressions

-import org.apache.spark.sql.catalyst.analysis.Star

 protected class AttributeEquals(val a: Attribute) {
   override def hashCode() = a match {
@@ -115,7 +114,7 @@ class AttributeSet private (val baseSet: Set[AttributeEquals])
   // sorts of things in its closure.
   override def toSeq: Seq[Attribute] = baseSet.map(_.a).toArray.toSeq

-  override def toString = "{" + baseSet.map(_.a).mkString(", ") + "}"
+  override def toString: String = "{" + baseSet.map(_.a).mkString(", ") + "}"

   override def isEmpty: Boolean = baseSet.isEmpty
 }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala

Lines changed: 21 additions & 0 deletions
@@ -146,6 +146,27 @@ class GenericRow(protected[sql] val values: Array[Any]) extends Row {
     result
   }

+  override def equals(o: Any): Boolean = o match {
+    case other: Row =>
+      if (values.length != other.length) {
+        return false
+      }
+
+      var i = 0
+      while (i < values.length) {
+        if (isNullAt(i) != other.isNullAt(i)) {
+          return false
+        }
+        if (apply(i) != other.apply(i)) {
+          return false
+        }
+        i += 1
+      }
+      true
+
+    case _ => false
+  }
+
   def copy() = this
 }

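
The `result` context line above is the tail of the hashCode GenericRow already had; adding an element-wise equals keeps the equals/hashCode contract intact. Note the separate null check: a null field only matches a null field. A hedged usage sketch (Row(...) is Spark SQL's Row factory):

import org.apache.spark.sql.Row

// Two rows holding the same values now compare equal element by element.
val a = Row(1, "x", null)
val b = Row(1, "x", null)
assert(a == b)                        // nulls only equal nulls
assert(Row(1, null) != Row(1, "x"))   // null vs. non-null fails fast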

sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala

Lines changed: 1 addition & 1 deletion
@@ -246,7 +246,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
     }
   }

-  override def equals(other: Any) = other match {
+  override def equals(other: Any): Boolean = other match {
     case d: Decimal =>
       compare(d) == 0
     case _ =>
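
Defining equals as `compare(d) == 0` keeps equality consistent with the Ordered[Decimal] ordering, which presumably also makes it scale-insensitive. For contrast, java.math.BigDecimal's equals is scale-sensitive while its compareTo is not:

import java.math.BigDecimal

val a = new BigDecimal("1.0")
val b = new BigDecimal("1.00")
assert(a.compareTo(b) == 0)   // numerically equal
assert(!a.equals(b))          // equals also compares scale, so 1.0 != 1.00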

sql/catalyst/src/main/scala/org/apache/spark/sql/types/dataTypes.scala

Lines changed: 10 additions & 10 deletions
@@ -307,7 +307,7 @@ protected[sql] object NativeType {


 protected[sql] trait PrimitiveType extends DataType {
-  override def isPrimitive = true
+  override def isPrimitive: Boolean = true
 }


@@ -442,7 +442,7 @@ class TimestampType private() extends NativeType {
   @transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[JvmType] }

   private[sql] val ordering = new Ordering[JvmType] {
-    def compare(x: Timestamp, y: Timestamp) = x.compareTo(y)
+    def compare(x: Timestamp, y: Timestamp): Int = x.compareTo(y)
   }

   /**
@@ -542,7 +542,7 @@ class LongType private() extends IntegralType {
    */
   override def defaultSize: Int = 8

-  override def simpleString = "bigint"
+  override def simpleString: String = "bigint"

   private[spark] override def asNullable: LongType = this
 }
@@ -572,7 +572,7 @@ class IntegerType private() extends IntegralType {
    */
   override def defaultSize: Int = 4

-  override def simpleString = "int"
+  override def simpleString: String = "int"

   private[spark] override def asNullable: IntegerType = this
 }
@@ -602,7 +602,7 @@ class ShortType private() extends IntegralType {
    */
   override def defaultSize: Int = 2

-  override def simpleString = "smallint"
+  override def simpleString: String = "smallint"

   private[spark] override def asNullable: ShortType = this
 }
@@ -632,7 +632,7 @@ class ByteType private() extends IntegralType {
    */
   override def defaultSize: Int = 1

-  override def simpleString = "tinyint"
+  override def simpleString: String = "tinyint"

   private[spark] override def asNullable: ByteType = this
 }
@@ -696,7 +696,7 @@ case class DecimalType(precisionInfo: Option[PrecisionInfo]) extends FractionalType {
    */
   override def defaultSize: Int = 4096

-  override def simpleString = precisionInfo match {
+  override def simpleString: String = precisionInfo match {
     case Some(PrecisionInfo(precision, scale)) => s"decimal($precision,$scale)"
     case None => "decimal(10,0)"
   }
@@ -836,7 +836,7 @@ case class ArrayType(elementType: DataType, containsNull: Boolean) extends DataType {
    */
   override def defaultSize: Int = 100 * elementType.defaultSize

-  override def simpleString = s"array<${elementType.simpleString}>"
+  override def simpleString: String = s"array<${elementType.simpleString}>"

   private[spark] override def asNullable: ArrayType =
     ArrayType(elementType.asNullable, containsNull = true)
@@ -1065,7 +1065,7 @@ case class StructType(fields: Array[StructField]) extends DataType with Seq[StructField] {
    */
   override def defaultSize: Int = fields.map(_.dataType.defaultSize).sum

-  override def simpleString = {
+  override def simpleString: String = {
     val fieldTypes = fields.map(field => s"${field.name}:${field.dataType.simpleString}")
     s"struct<${fieldTypes.mkString(",")}>"
   }
@@ -1142,7 +1142,7 @@ case class MapType(
    */
   override def defaultSize: Int = 100 * (keyType.defaultSize + valueType.defaultSize)

-  override def simpleString = s"map<${keyType.simpleString},${valueType.simpleString}>"
+  override def simpleString: String = s"map<${keyType.simpleString},${valueType.simpleString}>"

   private[spark] override def asNullable: MapType =
     MapType(keyType.asNullable, valueType.asNullable, valueContainsNull = true)
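
These simpleString overrides yield compact, Hive-style type names, with composite types nesting their children's simpleString. A sketch of the expected output, assuming Spark 1.3's public org.apache.spark.sql.types API:

import org.apache.spark.sql.types._

LongType.simpleString                                     // "bigint"
ArrayType(IntegerType).simpleString                       // "array<int>"
MapType(StringType, ShortType).simpleString               // "map<string,smallint>"
StructType(Seq(StructField("a", ByteType))).simpleString  // "struct<a:tinyint>"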

sql/core/src/main/scala/org/apache/spark/sql/Column.scala

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ class Column(protected[sql] val expr: Expression) {

   override def toString: String = expr.prettyString

-  override def equals(that: Any) = that match {
+  override def equals(that: Any): Boolean = that match {
     case that: Column => that.expr.equals(this.expr)
     case _ => false
   }

sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala

Lines changed: 3 additions & 3 deletions
@@ -33,7 +33,7 @@ import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.api.python.SerDeUtil
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.sql.catalyst.{ScalaReflection, SqlParser}
+import org.apache.spark.sql.catalyst.{expressions, ScalaReflection, SqlParser}
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedRelation, ResolvedStar}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.{JoinType, Inner}
@@ -722,7 +722,7 @@ class DataFrame private[sql](
     : DataFrame = {
     val dataType = ScalaReflection.schemaFor[B].dataType
     val attributes = AttributeReference(outputColumn, dataType)() :: Nil
-    def rowFunction(row: Row) = {
+    def rowFunction(row: Row): TraversableOnce[Row] = {
       f(row(0).asInstanceOf[A]).map(o => Row(ScalaReflection.convertToCatalyst(o, dataType)))
     }
     val generator = UserDefinedGenerator(attributes, rowFunction, apply(inputColumn).expr :: Nil)
@@ -1155,7 +1155,7 @@ class DataFrame private[sql](
       val gen = new JsonFactory().createGenerator(writer).setRootValueSeparator(null)

       new Iterator[String] {
-        override def hasNext = iter.hasNext
+        override def hasNext: Boolean = iter.hasNext
         override def next(): String = {
           JsonRDD.rowToJSON(rowSchema, gen)(iter.next())
           gen.flush()
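
The toJSON hunk shows the pattern of an anonymous Iterator wrapping an underlying one, with every public member explicitly typed. A self-contained sketch of the same pattern (names illustrative, not Spark's):

// Wrap an iterator lazily, giving hasNext and next() explicit types.
def upperLines(iter: Iterator[String]): Iterator[String] = new Iterator[String] {
  override def hasNext: Boolean = iter.hasNext
  override def next(): String = iter.next().toUpperCase
}

upperLines(Iterator("a", "b")).toList   // List("A", "B")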

sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala

Lines changed: 4 additions & 4 deletions
@@ -144,7 +144,7 @@ class SQLContext(@transient val sparkContext: SparkContext)

   @transient
   protected[sql] val tlSession = new ThreadLocal[SQLSession]() {
-    override def initialValue = defaultSession
+    override def initialValue: SQLSession = defaultSession
   }

   @transient
@@ -988,9 +988,9 @@ class SQLContext(@transient val sparkContext: SparkContext)

     val sqlContext: SQLContext = self

-    def codegenEnabled = self.conf.codegenEnabled
+    def codegenEnabled: Boolean = self.conf.codegenEnabled

-    def numPartitions = self.conf.numShufflePartitions
+    def numPartitions: Int = self.conf.numShufflePartitions

     def strategies: Seq[Strategy] =
       experimental.extraStrategies ++ (
@@ -1109,7 +1109,7 @@ class SQLContext(@transient val sparkContext: SparkContext)

     lazy val analyzed: LogicalPlan = analyzer(logical)
     lazy val withCachedData: LogicalPlan = {
-      assertAnalyzed
+      assertAnalyzed()
       cacheManager.useCachedData(analyzed)
     }
     lazy val optimizedPlan: LogicalPlan = optimizer(withCachedData)
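
The `assertAnalyzed` to `assertAnalyzed()` change follows the Scala convention that methods invoked for their side effects (here, throwing if analysis failed) are declared and called with parentheses, so the call site does not read like a pure accessor. A sketch of the convention (illustrative names, not Spark code):

class QueryExecutionSketch(analyzed: Boolean) {
  // Side-effecting: declared and called with parentheses.
  def assertValid(): Unit = {
    if (!analyzed) throw new IllegalStateException("plan not analyzed")
  }
  // Pure accessor: declared without.
  def isValid: Boolean = analyzed
}

val qe = new QueryExecutionSketch(analyzed = true)
qe.assertValid()   // parentheses signal "this does something"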

sql/core/src/main/scala/org/apache/spark/sql/UDFRegistration.scala

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ class UDFRegistration private[sql] (sqlContext: SQLContext) extends Logging {

     val dataType = sqlContext.parseDataType(stringDataType)

-    def builder(e: Seq[Expression]) =
+    def builder(e: Seq[Expression]): PythonUDF =
       PythonUDF(
         name,
         command,

sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnAccessor.scala

Lines changed: 2 additions & 2 deletions
@@ -48,9 +48,9 @@ private[sql] abstract class BasicColumnAccessor[T <: DataType, JvmType](

   protected def initialize() {}

-  def hasNext = buffer.hasRemaining
+  override def hasNext: Boolean = buffer.hasRemaining

-  def extractTo(row: MutableRow, ordinal: Int): Unit = {
+  override def extractTo(row: MutableRow, ordinal: Int): Unit = {
     extractSingle(row, ordinal)
   }

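
Beyond adding return types, this hunk adds the `override` modifier to members that implement abstract ones. Scala permits but does not require `override` there; writing it makes the compiler verify that a matching member exists in a parent type, which catches signature drift. A minimal sketch (not Spark's actual trait):

trait ColumnLike {
  def hasNext: Boolean   // abstract member
}

class BufferLike(private var remaining: Int) extends ColumnLike {
  // `override` is optional when implementing an abstract member, but it
  // fails compilation if the parent's signature ever changes.
  override def hasNext: Boolean = remaining > 0
}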

sql/core/src/main/scala/org/apache/spark/sql/columnar/ColumnBuilder.scala

Lines changed: 2 additions & 2 deletions
@@ -58,7 +58,7 @@ private[sql] class BasicColumnBuilder[T <: DataType, JvmType](
   override def initialize(
       initialSize: Int,
       columnName: String = "",
-      useCompression: Boolean = false) = {
+      useCompression: Boolean = false): Unit = {

     val size = if (initialSize == 0) DEFAULT_INITIAL_BUFFER_SIZE else initialSize
     this.columnName = columnName
@@ -73,7 +73,7 @@ private[sql] class BasicColumnBuilder[T <: DataType, JvmType](
     columnType.append(row, ordinal, buffer)
   }

-  override def build() = {
+  override def build(): ByteBuffer = {
     buffer.flip().asInstanceOf[ByteBuffer]
   }
 }
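
One note on build(): the asInstanceOf cast exists because, on the JDKs Spark targeted at the time, java.nio.Buffer.flip() is declared to return Buffer rather than ByteBuffer (JDK 9 later added covariant overrides). A sketch of the same flip-then-cast move:

import java.nio.ByteBuffer

val buf = ByteBuffer.allocate(16)
buf.putInt(42)
// flip() switches the buffer from write mode to read mode; the cast
// recovers the static ByteBuffer type on pre-JDK-9 signatures.
val readable = buf.flip().asInstanceOf[ByteBuffer]
assert(readable.getInt() == 42)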
