@@ -50,7 +50,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
5050 private var testDirPath: Path = _
5151
5252 before {
53-    testDir = Utils.createTempDir()
53+    testDir = Utils.createTempDir(namePrefix = s"event log")
5454 testDir.deleteOnExit()
5555 testDirPath = new Path (testDir.getAbsolutePath())
5656 }
@@ -62,7 +62,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
6262   test("Verify log file exist") {
6363     // Verify logging directory exists
6464     val conf = getLoggingConf(testDirPath)
65-     val eventLogger = new EventLoggingListener("test", None, testDirPath.toUri(), conf)
65+     val eventLogger = new EventLoggingListener("test", None, testDirPath.toString, conf)
6666     eventLogger.start()
6767
6868     val logPath = new Path(eventLogger.logPath + EventLoggingListener.IN_PROGRESS)
@@ -100,16 +100,15 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
100100     val secretPassword = "secret_password"
101101     val conf = getLoggingConf(testDirPath, None)
102102       .set(key, secretPassword)
103-       val eventLogger = new EventLoggingListener("test", None, testDirPath.toUri(), conf)
103+       val eventLogger = new EventLoggingListener("test", None, testDirPath.toString, conf)
104104     val envDetails = SparkEnv.environmentDetails(conf, "FIFO", Seq.empty, Seq.empty)
105105     val event = SparkListenerEnvironmentUpdate(envDetails)
106106     val redactedProps = eventLogger.redactEvent(event).environmentDetails("Spark Properties").toMap
107107     assert(redactedProps(key) == "*********(redacted)")
108108   }
109109
110110   test("Log overwriting") {
111-       val logUri = EventLoggingListener.getLogPath(testDir.toURI, "test", None)
112-       val logPath = new URI(logUri).getPath
111+       val logPath = EventLoggingListener.getLogPath(testDir.toString, "test", None)
113112     // Create file before writing the event log
114113     new FileOutputStream(new File(logPath)).close()
115114     // Expected IOException, since we haven't enabled log overwrite.
@@ -119,7 +118,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
119118 }
120119
121120   test("Event log name") {
122-       val baseDirUri = Utils.resolveURI("/base-dir")
121+       val baseDirUri = "/base-dir"
123122     // without compression
124123     assert(s"${baseDirUri.toString}/app1" === EventLoggingListener.getLogPath(
125124       baseDirUri, "app1", None))
@@ -154,7 +153,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
154153     val conf = getLoggingConf(testDirPath, compressionCodec)
155154     extraConf.foreach { case (k, v) => conf.set(k, v) }
156155     val logName = compressionCodec.map("test-" + _).getOrElse("test")
157-       val eventLogger = new EventLoggingListener(logName, None, testDirPath.toUri(), conf)
156+       val eventLogger = new EventLoggingListener(logName, None, testDirPath.toString, conf)
158157     val listenerBus = new LiveListenerBus(sc)
159158     val applicationStart = SparkListenerApplicationStart("Greatest App (N)ever", None,
160159       125L, "Mickey", None)
@@ -190,15 +189,12 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
190189 * This runs a simple Spark job and asserts that the expected events are logged when expected.
191190 */
192191   private def testApplicationEventLogging(compressionCodec: Option[String] = None) {
193-     // Set defaultFS to something that would cause an exception, to make sure we don't run
194-     // into SPARK-6688.
195192     val conf = getLoggingConf(testDirPath, compressionCodec)
196-       .set("spark.hadoop.fs.defaultFS", "unsupported://example.com")
197193     sc = new SparkContext("local-cluster[2,2,1024]", "test", conf)
198194     assert(sc.eventLogger.isDefined)
199195     val eventLogger = sc.eventLogger.get
200196     val eventLogPath = eventLogger.logPath
201-     val expectedLogDir = testDir.toURI()
197+     val expectedLogDir = testDir.getAbsolutePath
202198     assert(eventLogPath === EventLoggingListener.getLogPath(
203199       expectedLogDir, sc.applicationId, None, compressionCodec.map(CompressionCodec.getShortName)))
204200
@@ -290,7 +286,7 @@ object EventLoggingListenerSuite {
290286     val conf = new SparkConf
291287     conf.set("spark.eventLog.enabled", "true")
292288     conf.set("spark.eventLog.testing", "true")
293-       conf.set("spark.eventLog.dir", logDir.toUri.toString)
289+       conf.set("spark.eventLog.dir", logDir.toString)
294290     compressionCodec.foreach { codec =>
295291       conf.set("spark.eventLog.compress", "true")
296292       conf.set("spark.io.compression.codec", codec)
0 commit comments