
Commit 8c5ff3e

Changed syntax of unit returning methods
1 parent 02d0778 commit 8c5ff3e
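
For context, this commit replaces Scala's procedure syntax (`def f() { ... }`) with an explicit `: Unit =` result type throughout. A minimal sketch of the before/after shape, using a made-up logging method rather than code from the diff:

object UnitSyntaxExample {
  // Old style (procedure syntax): the Unit result type is implied by omitting '='.
  // def logStatus(msg: String) { println(msg) }

  // New style used in this commit: the result type is spelled out and the body
  // is an ordinary expression, so Unit is never inferred by accident.
  def logStatus(msg: String): Unit = {
    println(msg)
  }

  def main(args: Array[String]): Unit = logStatus("explicit Unit result type")
}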

File tree

8 files changed: 25 additions & 26 deletions


core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 4 additions & 3 deletions
@@ -1110,8 +1110,8 @@ object SparkContext extends Logging {
   }
 
   // Helper objects for converting common types to Writable
-  private def simpleWritableConverter[T, W <: Writable: ClassTag](convert: W => T):
-      WritableConverter[T] = {
+  private def simpleWritableConverter[T, W <: Writable: ClassTag](convert: W => T)
+      : WritableConverter[T] = {
     val wClass = classTag[W].runtimeClass.asInstanceOf[Class[W]]
     new WritableConverter[T](_ => wClass, x => convert(x.asInstanceOf[W]))
   }
@@ -1135,7 +1135,8 @@ object SparkContext extends Logging {
     simpleWritableConverter[Array[Byte], BytesWritable](_.getBytes)
   }
 
-  implicit def stringWritableConverter() = simpleWritableConverter[String, Text](_.toString)
+  implicit def stringWritableConverter(): WritableConverter[String] =
+    simpleWritableConverter[String, Text](_.toString)
 
   implicit def writableWritableConverter[T <: Writable]() =
     new WritableConverter[T](_.runtimeClass.asInstanceOf[Class[T]], _.asInstanceOf[T])
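
The wrapped signatures above also show the line-breaking convention adopted here: when a declaration does not fit on one line, the colon and result type open the continuation line instead of trailing the parameter list. A simplified sketch of the same shape, with a hypothetical Converter class standing in for WritableConverter:

// Hypothetical stand-in for WritableConverter, just to show the layout.
class Converter[T](val convert: String => T)

object WrappedSignatureExample {
  // The result type leads the continuation line, as in simpleWritableConverter above.
  private def simpleConverter[T](convert: String => T)
      : Converter[T] = {
    new Converter[T](convert)
  }

  // An explicit result type where the old code relied on type inference.
  def stringConverter(): Converter[String] = simpleConverter[String](identity)

  def main(args: Array[String]): Unit =
    println(stringConverter().convert("ok"))
}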

core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala

Lines changed: 4 additions & 4 deletions
@@ -391,22 +391,22 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
   /**
    * Save this RDD as a text file, using string representations of elements.
    */
-  def saveAsTextFile(path: String) {
+  def saveAsTextFile(path: String): Unit = {
     rdd.saveAsTextFile(path)
   }
 
 
   /**
    * Save this RDD as a compressed text file, using string representations of elements.
    */
-  def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]) {
+  def saveAsTextFile(path: String, codec: Class[_ <: CompressionCodec]): Unit = {
     rdd.saveAsTextFile(path, codec)
   }
 
   /**
    * Save this RDD as a SequenceFile of serialized objects.
    */
-  def saveAsObjectFile(path: String) {
+  def saveAsObjectFile(path: String): Unit = {
     rdd.saveAsObjectFile(path)
   }
 
@@ -425,7 +425,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * executed on this RDD. It is strongly recommended that this RDD is persisted in
    * memory, otherwise saving it on a file will require recomputation.
    */
-  def checkpoint() {
+  def checkpoint(): Unit = {
     rdd.checkpoint()
   }
 

core/src/main/scala/org/apache/spark/util/Distribution.scala

Lines changed: 3 additions & 3 deletions
@@ -42,16 +42,16 @@ class Distribution(val data: Array[Double], val startIdx: Int, val endIdx: Int)
    * given from 0 to 1
    * @param probabilities
    */
-  def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities):
-      IndexedSeq[Double] = {
+  def getQuantiles(probabilities: Traversable[Double] = defaultProbabilities)
+      : IndexedSeq[Double] = {
     probabilities.toIndexedSeq.map{p:Double => data(closestIndex(p))}
   }
 
   private def closestIndex(p: Double) = {
     math.min((p * length).toInt + startIdx, endIdx - 1)
   }
 
-  def showQuantiles(out: PrintStream = System.out) {
+  def showQuantiles(out: PrintStream = System.out): Unit = {
     out.println("min\t25%\t50%\t75%\tmax")
     getQuantiles(defaultProbabilities).foreach{q => out.print(q + "\t")}
     out.println

graphx/src/main/scala/org/apache/spark/graphx/lib/Analytics.scala

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ import org.apache.spark.graphx.PartitionStrategy._
  */
 object Analytics extends Logging {
 
-  def main(args: Array[String]) {
+  def main(args: Array[String]): Unit = {
     val host = args(0)
     val taskType = args(1)
     val fname = args(2)

streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala

Lines changed: 5 additions & 7 deletions
@@ -431,13 +431,11 @@ class StreamingContext private[streaming] (
    * Stop the execution of the streams.
    * @param stopSparkContext Stop the associated SparkContext or not
    */
-  def stop(stopSparkContext: Boolean = true) {
-    synchronized {
-      scheduler.stop()
-      logInfo("StreamingContext stopped successfully")
-      waiter.notifyStop()
-      if (stopSparkContext) sc.stop()
-    }
+  def stop(stopSparkContext: Boolean = true): Unit = synchronized {
+    scheduler.stop()
+    logInfo("StreamingContext stopped successfully")
+    waiter.notifyStop()
+    if (stopSparkContext) sc.stop()
   }
 }
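
The stop() change above is the one place where the new syntax also simplifies the body: once the method is written as an expression with an explicit Unit result type, the synchronized block can be the entire body, dropping a level of nesting. A small sketch of that shape with a dummy flag standing in for the real scheduler and waiter:

object SynchronizedBodyExample {
  private var running = true

  // The synchronized block is the whole method body; no extra braces needed.
  def stop(logMessage: Boolean = true): Unit = synchronized {
    running = false
    if (logMessage) println("stopped successfully")
  }

  def main(args: Array[String]): Unit = {
    stop()
    println(s"running = $running")
  }
}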

streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
    * Print the first ten elements of each RDD generated in this DStream. This is an output
    * operator, so this DStream will be registered as an output stream and there materialized.
    */
-  def print() {
+  def print(): Unit = {
     dstream.print()
   }

streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala

Lines changed: 5 additions & 5 deletions
@@ -477,15 +477,15 @@ class JavaStreamingContext(val ssc: StreamingContext) {
   /**
    * Start the execution of the streams.
    */
-  def start() {
+  def start(): Unit = {
     ssc.start()
   }
 
   /**
    * Wait for the execution to stop. Any exceptions that occurs during the execution
    * will be thrown in this thread.
    */
-  def awaitTermination() {
+  def awaitTermination(): Unit = {
     ssc.awaitTermination()
   }
 
@@ -494,22 +494,22 @@ class JavaStreamingContext(val ssc: StreamingContext) {
    * will be thrown in this thread.
    * @param timeout time to wait in milliseconds
    */
-  def awaitTermination(timeout: Long) {
+  def awaitTermination(timeout: Long): Unit = {
     ssc.awaitTermination(timeout)
   }
 
   /**
    * Stop the execution of the streams. Will stop the associated JavaSparkContext as well.
    */
-  def stop() {
+  def stop(): Unit = {
     ssc.stop()
   }
 
   /**
    * Stop the execution of the streams.
    * @param stopSparkContext Stop the associated SparkContext or not
    */
-  def stop(stopSparkContext: Boolean) {
+  def stop(stopSparkContext: Boolean): Unit = {
     ssc.stop(stopSparkContext)
   }
 }

streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala

Lines changed: 2 additions & 2 deletions
@@ -503,7 +503,7 @@ abstract class DStream[T: ClassTag] (
    * 'this' DStream will be registered as an output stream and therefore materialized.
    */
   @deprecated("use foreachRDD", "0.9.0")
-  def foreach(foreachFunc: RDD[T] => Unit) {
+  def foreach(foreachFunc: RDD[T] => Unit): Unit = {
     this.foreachRDD(foreachFunc)
   }
 
@@ -512,7 +512,7 @@ abstract class DStream[T: ClassTag] (
    * 'this' DStream will be registered as an output stream and therefore materialized.
    */
   @deprecated("use foreachRDD", "0.9.0")
-  def foreach(foreachFunc: (RDD[T], Time) => Unit) {
+  def foreach(foreachFunc: (RDD[T], Time) => Unit): Unit = {
     this.foreachRDD(foreachFunc)
   }