
Commit ae7834b

minor cleanup
1 parent a8b3ec6 commit ae7834b

File tree

3 files changed: +7 -11 lines


core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 4 additions & 7 deletions
@@ -40,9 +40,6 @@ import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
 
 
-
-
-
 /**
  * Various utility methods used by Spark.
  */
@@ -164,7 +161,7 @@ private[spark] object Utils extends Logging {
       shutdownDeletePaths += absolutePath
     }
   }
-
+
   // Register the tachyon path to be deleted via shutdown hook
   def registerShutdownDeleteDir(tachyonfile: TachyonFile) {
     val absolutePath = tachyonfile.getPath()
@@ -180,7 +177,7 @@ private[spark] object Utils extends Logging {
       shutdownDeletePaths.contains(absolutePath)
     }
   }
-
+
   // Is the path already registered to be deleted via a shutdown hook ?
   def hasShutdownDeleteTachyonDir(file: TachyonFile): Boolean = {
     val absolutePath = file.getPath()
@@ -204,7 +201,7 @@ private[spark] object Utils extends Logging {
     }
     retval
   }
-
+
   // Note: if file is child of some registered path, while not equal to it, then return true;
   // else false. This is to ensure that two shutdown hooks do not try to delete each others
   // paths - resulting in Exception and incomplete cleanup.
@@ -573,7 +570,7 @@ private[spark] object Utils extends Logging {
       }
     }
   }
-
+
   /**
    * Delete a file or directory and its contents recursively.
    */
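
For context, every hunk above touches the shutdown-delete bookkeeping in Utils. Below is a minimal sketch of that pattern, assuming a HashSet-backed registry as the visible lines suggest; the object name and the java.io.File-only variant are illustrative, while the real Utils also carries TachyonFile overloads as shown in the diff.

import java.io.File
import scala.collection.mutable.HashSet

// Sketch of the shutdown-delete registry pattern visible in the hunks above.
// Only the File variant is shown; names follow the diff where possible.
object ShutdownDeleteSketch {
  private val shutdownDeletePaths = new HashSet[String]()

  // Register a directory so a shutdown hook can delete it on JVM exit.
  def registerShutdownDeleteDir(file: File) {
    val absolutePath = file.getAbsolutePath()
    shutdownDeletePaths.synchronized {
      shutdownDeletePaths += absolutePath
    }
  }

  // Is the path already registered to be deleted via a shutdown hook?
  def hasShutdownDeleteDir(file: File): Boolean = {
    val absolutePath = file.getAbsolutePath()
    shutdownDeletePaths.synchronized {
      shutdownDeletePaths.contains(absolutePath)
    }
  }
}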

core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala

Lines changed: 1 addition & 0 deletions
@@ -411,6 +411,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
   }
 
   test("tachyon storage") {
+    // TODO Make the spark.test.tachyon.enable true after using tachyon 0.5.0 testing jar.
     val tachyonUnitTestEnabled = conf.getBoolean("spark.test.tachyon.enable", false)
     if (tachyonUnitTestEnabled) {
       store = new BlockManager("<driver>", actorSystem, master, serializer, 1200, conf, securityMgr)
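
The added TODO sits above a config-gated test: the body only runs when spark.test.tachyon.enable is set. A small, self-contained sketch of that gating pattern follows; only the flag name and its default come from the diff, while the suite class name and the else branch are assumptions.

import org.scalatest.FunSuite
import org.apache.spark.SparkConf

// Sketch of a config-gated test as in BlockManagerSuite above; the class name is hypothetical.
class TachyonGatedSuite extends FunSuite {
  val conf = new SparkConf(false)

  test("tachyon storage") {
    // Off by default until a Tachyon testing jar is available (see the TODO above).
    val tachyonUnitTestEnabled = conf.getBoolean("spark.test.tachyon.enable", false)
    if (tachyonUnitTestEnabled) {
      // ... exercise the Tachyon-backed BlockManager here ...
    } else {
      info("tachyon storage test disabled.")
    }
  }
}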

examples/src/main/scala/org/apache/spark/examples/SparkTachyonHdfsLR.scala

Lines changed: 2 additions & 4 deletions
@@ -26,7 +26,7 @@ import org.apache.spark.scheduler.InputFormatInfo
 import org.apache.spark.storage.StorageLevel
 
 /**
- * Logistic regression based classification.
+ * Logistic regression based classification.
  * This example uses Tachyon to persist rdds during computation.
  */
 object SparkTachyonHdfsLR {
@@ -36,8 +36,6 @@ object SparkTachyonHdfsLR {
   case class DataPoint(x: Vector, y: Double)
 
   def parsePoint(line: String): DataPoint = {
-    //val nums = line.split(' ').map(_.toDouble)
-    //return DataPoint(new Vector(nums.slice(1, D+1)), nums(0))
     val tok = new java.util.StringTokenizer(line, " ")
     var y = tok.nextToken.toDouble
     var x = new Array[Double](D)
@@ -56,7 +54,7 @@ object SparkTachyonHdfsLR {
     val inputPath = args(1)
     val conf = SparkHadoopUtil.get.newConfiguration()
     val sc = new SparkContext(args(0), "SparkTachyonHdfsLR",
-      System.getenv("SPARK_HOME"), SparkContext.jarOfClass(this.getClass), Map(),
+      System.getenv("SPARK_HOME"), SparkContext.jarOfClass(this.getClass), Map(),
       InputFormatInfo.computePreferredLocations(
         Seq(new InputFormatInfo(conf, classOf[org.apache.hadoop.mapred.TextInputFormat], inputPath))
       ))
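
The two removed lines in the parsePoint hunk were a commented-out split-based parser; what survives reads the label first and then D feature values with a StringTokenizer. A runnable sketch of that parsing logic follows, assuming D = 10 and a plain Array[Double] in place of the example's Vector type; the loop that fills x is not shown in the hunk, so its exact form here is a guess.

// Sketch of the parsePoint logic kept by this diff: label first, then D features.
object ParsePointSketch {
  val D = 10  // number of features; an assumption, not taken from the hunk

  case class DataPoint(x: Array[Double], y: Double)

  def parsePoint(line: String): DataPoint = {
    val tok = new java.util.StringTokenizer(line, " ")
    val y = tok.nextToken.toDouble
    val x = new Array[Double](D)
    var i = 0
    while (i < D) {
      x(i) = tok.nextToken.toDouble
      i += 1
    }
    DataPoint(x, y)
  }

  def main(args: Array[String]) {
    val p = parsePoint("1 " + Array.fill(D)("0.5").mkString(" "))
    println("y=" + p.y + ", first feature=" + p.x(0))
  }
}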
