
Commit 7550490

Addressed PR comments
1 parent: d8600cf

3 files changed: 7 additions, 5 deletions


core/src/main/scala/org/apache/spark/util/ThreadUtils.scala

Lines changed: 1 addition & 1 deletion
@@ -89,7 +89,7 @@ private[spark] object ThreadUtils {
   }
 
   /**
-   * Run a piece of code in a new thread, and the get result. Exception in the new thread is
+   * Run a piece of code in a new thread and return the result. Exception in the new thread is
    * thrown in the caller thread with an adjusted stack trace that removes references to this
    * method for clarity. The exception stack traces will be like the following
    *
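
For context, the Scaladoc fixed above describes ThreadUtils.runInNewThread: it runs a block on a freshly created thread, blocks the caller until the block finishes, returns its result, and rethrows any exception in the calling thread with ThreadUtils frames stripped from the stack trace. A minimal usage sketch follows; since ThreadUtils is private[spark], it assumes the calling code lives inside the org.apache.spark package (for example, in Spark's own sources), and the object name RunInNewThreadExample is illustrative only.

package org.apache.spark.util

object RunInNewThreadExample {
  def main(args: Array[String]): Unit = {
    // The block runs on a new (daemon by default) thread; the caller blocks
    // until it completes and receives the block's result.
    val answer: Int = ThreadUtils.runInNewThread("example-thread") {
      21 * 2
    }
    println(s"computed on another thread: $answer")

    // An exception thrown inside the block is rethrown here, in the calling
    // thread, with the stack trace adjusted as the Scaladoc above describes.
    try {
      ThreadUtils.runInNewThread("failing-thread", isDaemon = false) {
        throw new IllegalArgumentException("boom")
      }
    } catch {
      case e: IllegalArgumentException => println(s"caught in caller: ${e.getMessage}")
    }
  }
}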

core/src/test/scala/org/apache/spark/util/ThreadUtilsSuite.scala

Lines changed: 4 additions & 2 deletions
@@ -22,6 +22,7 @@ import java.util.concurrent.{CountDownLatch, TimeUnit}
 
 import scala.concurrent.duration._
 import scala.concurrent.{Await, Future}
+import scala.util.Random
 
 import org.apache.spark.SparkFunSuite
 

@@ -74,10 +75,11 @@ class ThreadUtilsSuite extends SparkFunSuite {
     assert(
       runInNewThread("thread-name", isDaemon = false) { Thread.currentThread().isDaemon } === false
     )
+    val uniqueExceptionMessage = "test" + Random.nextInt()
     val exception = intercept[IllegalArgumentException] {
-      runInNewThread("thread-name") { throw new IllegalArgumentException("test") }
+      runInNewThread("thread-name") { throw new IllegalArgumentException(uniqueExceptionMessage) }
     }
-    assert(exception.asInstanceOf[IllegalArgumentException].getMessage.contains("test"))
+    assert(exception.asInstanceOf[IllegalArgumentException].getMessage === uniqueExceptionMessage)
     assert(exception.getStackTrace.mkString("\n").contains(
       "... run in separate thread using org.apache.spark.util.ThreadUtils ...") === true,
       "stack trace does not contain expected place holder"

streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -180,7 +180,7 @@ class StreamingContextSuite extends SparkFunSuite with BeforeAndAfter with Timeo
     assert(ssc.scheduler.isStarted === false)
   }
 
-  test("start should set job group correctly") {
+  test("start should set job group and description of streaming jobs correctly") {
     ssc = new StreamingContext(conf, batchDuration)
     ssc.sc.setJobGroup("non-streaming", "non-streaming", true)
     val sc = ssc.sc

@@ -198,7 +198,7 @@ class StreamingContextSuite extends SparkFunSuite with BeforeAndAfter with Timeo
     }
     ssc.start()
 
-    eventually(timeout(5 seconds), interval(10 milliseconds)) {
+    eventually(timeout(10 seconds), interval(10 milliseconds)) {
       assert(allFound === true)
     }
 
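
The timeout bump above (5 to 10 seconds) presumably gives slower machines more headroom before the retry loop gives up. The eventually(timeout(...), interval(...)) idiom comes from ScalaTest's Eventually trait: the block is re-evaluated at the given interval until it stops throwing or the timeout elapses. A small sketch outside any Spark suite; EventuallyExample and the background thread are illustrative, and it assumes ScalaTest is on the classpath.

import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._

object EventuallyExample {
  def main(args: Array[String]): Unit = {
    @volatile var done = false
    // Simulate asynchronous work that completes after roughly half a second.
    new Thread(new Runnable {
      def run(): Unit = { Thread.sleep(500); done = true }
    }).start()

    // Re-evaluates the block every 10 ms; fails only if the assertion is
    // still throwing after 10 seconds.
    eventually(timeout(10.seconds), interval(10.milliseconds)) {
      assert(done)
    }
    println("condition became true within the timeout")
  }
}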
