Skip to content

Commit 21a1e1b

Browse files
witgo authored and JoshRosen committed
[SPARK-3273][SPARK-3301] We should read the version information from the same place
Author: GuoQiang Li <[email protected]>

Closes apache#2175 from witgo/SPARK-3273 and squashes the following commits:
cf9c65a [GuoQiang Li] We should read the version information from the same place
2a44e2f [GuoQiang Li] The spark version in the welcome message of pyspark is not correct
1 parent 607ae39 commit 21a1e1b

File tree

6 files changed

+14
-10
lines changed

6 files changed

+14
-10
lines changed

core/src/main/scala/org/apache/spark/SparkContext.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkD
4949
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
5050
import org.apache.spark.scheduler.local.LocalBackend
5151
import org.apache.spark.storage._
52+
import org.apache.spark.SPARK_VERSION
5253
import org.apache.spark.ui.SparkUI
5354
import org.apache.spark.util.{CallSite, ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedWeakValueHashMap, Utils}
5455

@@ -825,7 +826,7 @@ class SparkContext(config: SparkConf) extends Logging {
825826
}
826827

827828
/** The version of Spark on which this application is running. */
828-
def version = SparkContext.SPARK_VERSION
829+
def version = SPARK_VERSION
829830

830831
/**
831832
* Return a map from the slave to the max memory available for caching and the remaining
@@ -1297,8 +1298,6 @@ class SparkContext(config: SparkConf) extends Logging {
12971298
*/
12981299
object SparkContext extends Logging {
12991300

1300-
private[spark] val SPARK_VERSION = "1.2.0-SNAPSHOT"
1301-
13021301
private[spark] val SPARK_JOB_DESCRIPTION = "spark.job.description"
13031302

13041303
private[spark] val SPARK_JOB_GROUP_ID = "spark.jobGroup.id"

core/src/main/scala/org/apache/spark/package.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,4 +44,5 @@ package org.apache
4444

4545
package object spark {
4646
// For package docs only
47+
val SPARK_VERSION = "1.2.0-SNAPSHOT"
4748
}

core/src/main/scala/org/apache/spark/scheduler/EventLoggingListener.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ import org.json4s.jackson.JsonMethods._
2929
import org.apache.spark.{Logging, SparkConf, SparkContext}
3030
import org.apache.spark.deploy.SparkHadoopUtil
3131
import org.apache.spark.io.CompressionCodec
32+
import org.apache.spark.SPARK_VERSION
3233
import org.apache.spark.util.{FileLogger, JsonProtocol, Utils}
3334

3435
/**
@@ -86,7 +87,7 @@ private[spark] class EventLoggingListener(
8687
sparkConf.get("spark.io.compression.codec", CompressionCodec.DEFAULT_COMPRESSION_CODEC)
8788
logger.newFile(COMPRESSION_CODEC_PREFIX + codec)
8889
}
89-
logger.newFile(SPARK_VERSION_PREFIX + SparkContext.SPARK_VERSION)
90+
logger.newFile(SPARK_VERSION_PREFIX + SPARK_VERSION)
9091
logger.newFile(LOG_PREFIX + logger.fileIndex)
9192
}
9293

core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
2828
import org.apache.spark.{SparkConf, SparkContext}
2929
import org.apache.spark.deploy.SparkHadoopUtil
3030
import org.apache.spark.io.CompressionCodec
31+
import org.apache.spark.SPARK_VERSION
3132
import org.apache.spark.util.{JsonProtocol, Utils}
3233

3334
import java.io.File
@@ -196,7 +197,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
196197

197198
def assertInfoCorrect(info: EventLoggingInfo, loggerStopped: Boolean) {
198199
assert(info.logPaths.size > 0)
199-
assert(info.sparkVersion === SparkContext.SPARK_VERSION)
200+
assert(info.sparkVersion === SPARK_VERSION)
200201
assert(info.compressionCodec.isDefined === compressionCodec.isDefined)
201202
info.compressionCodec.foreach { codec =>
202203
assert(compressionCodec.isDefined)
@@ -381,7 +382,7 @@ class EventLoggingListenerSuite extends FunSuite with BeforeAndAfter {
381382
private def assertSparkVersionIsValid(logFiles: Array[FileStatus]) {
382383
val file = logFiles.map(_.getPath.getName).find(EventLoggingListener.isSparkVersionFile)
383384
assert(file.isDefined)
384-
assert(EventLoggingListener.parseSparkVersion(file.get) === SparkContext.SPARK_VERSION)
385+
assert(EventLoggingListener.parseSparkVersion(file.get) === SPARK_VERSION)
385386
}
386387

387388
private def assertCompressionCodecIsValid(logFiles: Array[FileStatus], compressionCodec: String) {

python/pyspark/shell.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,9 +49,9 @@
4949
____ __
5050
/ __/__ ___ _____/ /__
5151
_\ \/ _ \/ _ `/ __/ '_/
52-
/__ / .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
52+
/__ / .__/\_,_/_/ /_/\_\ version %s
5353
/_/
54-
""")
54+
""" % sc.version)
5555
print("Using Python version %s (%s, %s)" % (
5656
platform.python_version(),
5757
platform.python_build()[0],

repl/src/main/scala/org/apache/spark/repl/SparkILoopInit.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@ import scala.reflect.internal.util.Position
1414
import scala.util.control.Exception.ignoring
1515
import scala.tools.nsc.util.stackTraceString
1616

17+
import org.apache.spark.SPARK_VERSION
18+
1719
/**
1820
* Machinery for the asynchronous initialization of the repl.
1921
*/
@@ -26,9 +28,9 @@ trait SparkILoopInit {
2628
____ __
2729
/ __/__ ___ _____/ /__
2830
_\ \/ _ \/ _ `/ __/ '_/
29-
/___/ .__/\_,_/_/ /_/\_\ version 1.0.0-SNAPSHOT
31+
/___/ .__/\_,_/_/ /_/\_\ version %s
3032
/_/
31-
""")
33+
""".format(SPARK_VERSION))
3234
import Properties._
3335
val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
3436
versionString, javaVmName, javaVersion)

0 commit comments

Comments (0)