-
Notifications
You must be signed in to change notification settings - Fork 29k
SPARK-22830 Scala Coding style has been improved in Spark Examples #20016
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
4ac1cb1
8e0e4ee
9da7e36
5d8d0c9
a14eb3e
319e282
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -49,12 +49,10 @@ object DFSReadWriteTest { | |
| } | ||
|
|
||
| private def printUsage(): Unit = { | ||
| val usage: String = "DFS Read-Write Test\n" + | ||
| "\n" + | ||
| "Usage: localFile dfsDir\n" + | ||
| "\n" + | ||
| "localFile - (string) local file to use in test\n" + | ||
| "dfsDir - (string) DFS directory for read/write tests\n" | ||
| val usage = s"""DFS Read-Write Test | ||
|
||
| Usage: localFile dfsDir | ||
| localFile - (string) local file to use in test | ||
| dfsDir - (string) DFS directory for read/write tests""" | ||
|
|
||
| println(usage) | ||
| } | ||
|
|
@@ -69,13 +67,13 @@ object DFSReadWriteTest { | |
|
|
||
| localFilePath = new File(args(i)) | ||
| if (!localFilePath.exists) { | ||
| System.err.println("Given path (" + args(i) + ") does not exist.\n") | ||
| System.err.println(s"Given path (${args(i)}) does not exist") | ||
| printUsage() | ||
| System.exit(1) | ||
| } | ||
|
|
||
| if (!localFilePath.isFile) { | ||
| System.err.println("Given path (" + args(i) + ") is not a file.\n") | ||
| System.err.println(s"Given path (${args(i)}) is not a file") | ||
| printUsage() | ||
| System.exit(1) | ||
| } | ||
|
|
@@ -97,22 +95,22 @@ object DFSReadWriteTest { | |
| def main(args: Array[String]): Unit = { | ||
| parseArgs(args) | ||
|
|
||
| println("Performing local word count") | ||
| println(s"Performing local word count") | ||
|
||
| val fileContents = readFile(localFilePath.toString()) | ||
| val localWordCount = runLocalWordCount(fileContents) | ||
|
|
||
| println("Creating SparkSession") | ||
| println(s"Creating SparkSession") | ||
| val spark = SparkSession | ||
| .builder | ||
| .appName("DFS Read Write Test") | ||
| .getOrCreate() | ||
|
|
||
| println("Writing local file to DFS") | ||
| val dfsFilename = dfsDirPath + "/dfs_read_write_test" | ||
| println(s"Writing local file to DFS") | ||
| val dfsFilename = s"${dfsDirPath}/dfs_read_write_test" | ||
| val fileRDD = spark.sparkContext.parallelize(fileContents) | ||
| fileRDD.saveAsTextFile(dfsFilename) | ||
|
|
||
| println("Reading file from DFS and running Word Count") | ||
| println(s"Reading file from DFS and running Word Count") | ||
| val readFileRDD = spark.sparkContext.textFile(dfsFilename) | ||
|
|
||
| val dfsWordCount = readFileRDD | ||
|
|
@@ -127,11 +125,11 @@ object DFSReadWriteTest { | |
| spark.stop() | ||
|
|
||
| if (localWordCount == dfsWordCount) { | ||
| println(s"Success! Local Word Count ($localWordCount) " + | ||
| s"and DFS Word Count ($dfsWordCount) agree.") | ||
| println(s"Success! Local Word Count ($localWordCount) | ||
| and DFS Word Count ($dfsWordCount) agree.") | ||
|
||
| } else { | ||
| println(s"Failure! Local Word Count ($localWordCount) " + | ||
| s"and DFS Word Count ($dfsWordCount) disagree.") | ||
| println(s"Failure! Local Word Count ($localWordCount) | ||
| and DFS Word Count ($dfsWordCount) disagree.") | ||
| } | ||
|
|
||
| } | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -95,7 +95,7 @@ object LocalALS { | |
|
|
||
| def showWarning() { | ||
| System.err.println( | ||
| """WARN: This is a naive implementation of ALS and is given as an example! | ||
| s"""WARN: This is a naive implementation of ALS and is given as an example! | ||
|
||
| |Please use org.apache.spark.ml.recommendation.ALS | ||
| |for more conventional use. | ||
| """.stripMargin) | ||
|
|
@@ -110,7 +110,7 @@ object LocalALS { | |
| F = f.toInt | ||
| ITERATIONS = iters.toInt | ||
| case _ => | ||
| System.err.println("Usage: LocalALS <M> <U> <F> <iters>") | ||
| System.err.println(s"Usage: LocalALS <M> <U> <F> <iters>") | ||
| System.exit(1) | ||
| } | ||
|
|
||
|
|
@@ -129,8 +129,7 @@ object LocalALS { | |
| println(s"Iteration $iter:") | ||
| ms = (0 until M).map(i => updateMovie(i, ms(i), us, R)).toArray | ||
| us = (0 until U).map(j => updateUser(j, us(j), ms, R)).toArray | ||
| println("RMSE = " + rmse(R, ms, us)) | ||
| println() | ||
| println(s"RMSE = ${rmse(R, ms, us)}") | ||
| } | ||
| } | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -41,7 +41,7 @@ object LocalFileLR { | |
|
|
||
| def showWarning() { | ||
| System.err.println( | ||
| """WARN: This is a naive implementation of Logistic Regression and is given as an example! | ||
| s"""WARN: This is a naive implementation of Logistic Regression and is given as an example! | ||
| |Please use org.apache.spark.ml.classification.LogisticRegression | ||
| |for more conventional use. | ||
| """.stripMargin) | ||
|
|
@@ -58,10 +58,10 @@ object LocalFileLR { | |
|
|
||
| // Initialize w to a random value | ||
| val w = DenseVector.fill(D) {2 * rand.nextDouble - 1} | ||
| println("Initial w: " + w) | ||
| println(s"Initial w: ${w}") | ||
|
||
|
|
||
| for (i <- 1 to ITERATIONS) { | ||
| println("On iteration " + i) | ||
| println(s"On iteration ${i}") | ||
| val gradient = DenseVector.zeros[Double](D) | ||
| for (p <- points) { | ||
| val scale = (1 / (1 + math.exp(-p.y * (w.dot(p.x)))) - 1) * p.y | ||
|
|
@@ -71,7 +71,7 @@ object LocalFileLR { | |
| } | ||
|
|
||
| fileSrc.close() | ||
| println("Final w: " + w) | ||
| println(s"Final w: ${w}") | ||
| } | ||
| } | ||
| // scalastyle:on println | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -80,7 +80,7 @@ object SparkALS { | |
|
|
||
| def showWarning() { | ||
| System.err.println( | ||
| """WARN: This is a naive implementation of ALS and is given as an example! | ||
| s"""WARN: This is a naive implementation of ALS and is given as an example! | ||
|
||
| |Please use org.apache.spark.ml.recommendation.ALS | ||
| |for more conventional use. | ||
| """.stripMargin) | ||
|
|
@@ -100,7 +100,7 @@ object SparkALS { | |
| ITERATIONS = iters.getOrElse("5").toInt | ||
| slices = slices_.getOrElse("2").toInt | ||
| case _ => | ||
| System.err.println("Usage: SparkALS [M] [U] [F] [iters] [partitions]") | ||
| System.err.println(s"Usage: SparkALS [M] [U] [F] [iters] [partitions]") | ||
|
||
| System.exit(1) | ||
| } | ||
|
|
||
|
|
@@ -135,10 +135,8 @@ object SparkALS { | |
| .map(i => update(i, usb.value(i), msb.value, Rc.value.transpose())) | ||
| .collect() | ||
| usb = sc.broadcast(us) // Re-broadcast us because it was updated | ||
| println("RMSE = " + rmse(R, ms, us)) | ||
| println() | ||
| println(s"RMSE = ${rmse(R, ms, us)}") | ||
| } | ||
|
|
||
| spark.stop() | ||
| } | ||
|
|
||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Beyond the unnecessary { } that @srowen has already mentioned, this isn't really a style improvement.
"a string " + anotherString is arguably at least as good stylistically as using string interpolation for such simple concatenations of a string reference to the end of a string literal. It's only when there are multiple concatenations and/or multiple string references that interpolation is clearly the better way.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@markhamstra Thank you for the valuable suggestion; I have addressed it and pushed a new commit.