
Commit 1af0b0e

Author: Andrew Or (committed)

Fix style

1 parent: 074c00b

File tree

2 files changed: +6, -2 lines


external/flume/src/main/scala/org/apache/spark/streaming/flume/FlumePollingInputDStream.scala

Lines changed: 3 additions & 1 deletion

@@ -53,7 +53,9 @@ private[streaming] class FlumePollingInputDStream[T: ClassTag](
     storageLevel: StorageLevel
   ) extends ReceiverInputDStream[SparkFlumeEvent](_ssc) {

-  protected[streaming] override val customScopeName: Option[String] = Some(s"flume polling stream [$id]")
+  protected[streaming] override val customScopeName: Option[String] = {
+    Some(s"flume polling stream [$id]")
+  }

   override def getReceiver(): Receiver[SparkFlumeEvent] = {
     new FlumePollingReceiver(addresses, maxBatchSize, parallelism, storageLevel)

external/kafka/src/main/scala/org/apache/spark/streaming/kafka/DirectKafkaInputDStream.scala

Lines changed: 3 additions & 1 deletion

@@ -65,7 +65,9 @@ class DirectKafkaInputDStream[
   val maxRetries = context.sparkContext.getConf.getInt(
     "spark.streaming.kafka.maxRetries", 1)

-  protected[streaming] override val customScopeName: Option[String] = Some(s"kafka direct stream [$id]")
+  protected[streaming] override val customScopeName: Option[String] = {
+    Some(s"kafka direct stream [$id]")
+  }

   protected[streaming] override val checkpointData =
     new DirectKafkaInputDStreamCheckpointData
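
Both hunks make the same change: the single-line customScopeName override is rewritten with its right-hand side inside a brace-delimited block, presumably so the line stays within Spark's Scala style line-length limit. A minimal generic sketch of that pattern, using made-up trait and class names for illustration only (not Spark code and not part of this commit):

// Sketch of the style applied in this commit: a long single-expression
// val definition is wrapped in a braced block so the line stays short.
trait Scoped {
  def customScopeName: Option[String]
}

class ExamplePollingStream(id: Int) extends Scoped {
  // Before (one long line):
  //   override val customScopeName: Option[String] = Some(s"example polling stream [$id]")

  // After (right-hand side moved into a block):
  override val customScopeName: Option[String] = {
    Some(s"example polling stream [$id]")
  }
}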
