@@ -17,17 +17,16 @@
 
 package org.apache.spark
 
-import org.apache.spark.util.ResetSystemProperties
 import org.scalatest.FunSuite
 
 import org.apache.hadoop.io.BytesWritable
 
-class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSparkContext {
+class SparkContextSuite extends FunSuite with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
     // Regression test for SPARK-4180
-    System.setProperty("spark.driver.allowMultipleContexts", "false")
     val conf = new SparkConf().setAppName("test").setMaster("local")
+      .set("spark.driver.allowMultipleContexts", "false")
     sc = new SparkContext(conf)
     // A SparkContext is already running, so we shouldn't be able to create a second one
     intercept[SparkException] { new SparkContext(conf) }
@@ -37,20 +36,20 @@ class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSp
   }
 
   test("Can still construct a new SparkContext after failing to construct a previous one") {
-    System.setProperty("spark.driver.allowMultipleContexts", "false")
+    val conf = new SparkConf().set("spark.driver.allowMultipleContexts", "false")
     // This is an invalid configuration (no app name or master URL)
     intercept[SparkException] {
-      new SparkContext(new SparkConf())
+      new SparkContext(conf)
     }
     // Even though those earlier calls failed, we should still be able to create a new context
-    sc = new SparkContext(new SparkConf().setMaster("local").setAppName("test"))
+    sc = new SparkContext(conf.setMaster("local").setAppName("test"))
   }
 
   test("Check for multiple SparkContexts can be disabled via undocumented debug option") {
-    System.setProperty("spark.driver.allowMultipleContexts", "true")
    var secondSparkContext: SparkContext = null
    try {
      val conf = new SparkConf().setAppName("test").setMaster("local")
+        .set("spark.driver.allowMultipleContexts", "true")
      sc = new SparkContext(conf)
      secondSparkContext = new SparkContext(conf)
    } finally {
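
Note on the pattern this diff adopts: System.setProperty mutates JVM-global state, which is why the suite previously needed the ResetSystemProperties trait to undo it between tests. Setting the flag on each SparkConf scopes it to that one test instead. A minimal standalone sketch of the new pattern follows; the object name is hypothetical and not part of the patch, and it assumes the Spark 1.x API and the undocumented spark.driver.allowMultipleContexts flag exercised in the tests above.

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical standalone example, not part of the patch.
object AllowMultipleContextsSketch {
  def main(args: Array[String]): Unit = {
    // The flag lives on this SparkConf only; there is no JVM-global
    // System.setProperty call, so no cross-test cleanup is needed.
    val conf = new SparkConf()
      .setAppName("test")
      .setMaster("local")
      .set("spark.driver.allowMultipleContexts", "true")

    val sc1 = new SparkContext(conf)
    // With the undocumented debug flag set to "true", constructing a
    // second active context is permitted (normally a SparkException).
    val sc2 = new SparkContext(conf)

    sc2.stop()
    sc1.stop()
  }
}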