Skip to content

Commit 86e9eaa

Browse files
liancheng authored and marmbrus committed
[SPARK-4225][SQL] Resorts to SparkContext.version to inspect Spark version
This PR resorts to `SparkContext.version` rather than META-INF/MANIFEST.MF in the assembly jar to inspect Spark version. Currently, when built with Maven, the MANIFEST.MF file in the assembly jar is incorrectly replaced by Guava 15.0 MANIFEST.MF, probably because of the assembly/shading tricks. Another related PR is #3103, which tries to fix the MANIFEST issue. Author: Cheng Lian <[email protected]> Closes #3105 from liancheng/spark-4225 and squashes the following commits: d9585e1 [Cheng Lian] Resorts to SparkContext.version to inspect Spark version
1 parent 636d7bc commit 86e9eaa

File tree

2 files changed

+12
-24
lines changed

2 files changed

+12
-24
lines changed

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 7 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -21,10 +21,8 @@ import java.io._
2121
import java.lang.management.ManagementFactory
2222
import java.net._
2323
import java.nio.ByteBuffer
24-
import java.util.jar.Attributes.Name
25-
import java.util.{Properties, Locale, Random, UUID}
26-
import java.util.concurrent.{ThreadFactory, ConcurrentHashMap, Executors, ThreadPoolExecutor}
27-
import java.util.jar.{Manifest => JarManifest}
24+
import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadFactory, ThreadPoolExecutor}
25+
import java.util.{Locale, Properties, Random, UUID}
2826

2927
import scala.collection.JavaConversions._
3028
import scala.collection.Map
@@ -38,11 +36,11 @@ import com.google.common.io.{ByteStreams, Files}
3836
import com.google.common.util.concurrent.ThreadFactoryBuilder
3937
import org.apache.commons.lang3.SystemUtils
4038
import org.apache.hadoop.conf.Configuration
41-
import org.apache.log4j.PropertyConfigurator
4239
import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
40+
import org.apache.log4j.PropertyConfigurator
4341
import org.eclipse.jetty.util.MultiException
4442
import org.json4s._
45-
import tachyon.client.{TachyonFile,TachyonFS}
43+
import tachyon.client.{TachyonFS, TachyonFile}
4644

4745
import org.apache.spark._
4846
import org.apache.spark.deploy.SparkHadoopUtil
@@ -352,8 +350,8 @@ private[spark] object Utils extends Logging {
352350
* Download a file to target directory. Supports fetching the file in a variety of ways,
353351
* including HTTP, HDFS and files on a standard filesystem, based on the URL parameter.
354352
*
355-
* If `useCache` is true, first attempts to fetch the file to a local cache that's shared
356-
* across executors running the same application. `useCache` is used mainly for
353+
* If `useCache` is true, first attempts to fetch the file to a local cache that's shared
354+
* across executors running the same application. `useCache` is used mainly for
357355
* the executors, and not in local mode.
358356
*
359357
* Throws SparkException if the target file already exists and has different contents than
@@ -400,7 +398,7 @@ private[spark] object Utils extends Logging {
400398
} else {
401399
doFetchFile(url, targetDir, fileName, conf, securityMgr, hadoopConf)
402400
}
403-
401+
404402
// Decompress the file if it's a .tar or .tar.gz
405403
if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
406404
logInfo("Untarring " + fileName)
@@ -1776,13 +1774,6 @@ private[spark] object Utils extends Logging {
17761774
s"$libraryPathEnvName=$libraryPath$ampersand"
17771775
}
17781776

1779-
lazy val sparkVersion =
1780-
SparkContext.jarOfObject(this).map { path =>
1781-
val manifestUrl = new URL(s"jar:file:$path!/META-INF/MANIFEST.MF")
1782-
val manifest = new JarManifest(manifestUrl.openStream())
1783-
manifest.getMainAttributes.getValue(Name.IMPLEMENTATION_VERSION)
1784-
}.getOrElse("Unknown")
1785-
17861777
/**
17871778
* Return the value of a config either through the SparkConf or the Hadoop configuration
17881779
* if this is Yarn mode. In the latter case, this defaults to the value set through SparkConf
@@ -1796,7 +1787,6 @@ private[spark] object Utils extends Logging {
17961787
sparkValue
17971788
}
17981789
}
1799-
18001790
}
18011791

18021792
/**

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -17,18 +17,16 @@
1717

1818
package org.apache.spark.sql.hive.thriftserver
1919

20-
import java.util.jar.Attributes.Name
21-
22-
import scala.collection.JavaConversions._
23-
2420
import java.io.IOException
2521
import java.util.{List => JList}
2622
import javax.security.auth.login.LoginException
2723

24+
import scala.collection.JavaConversions._
25+
2826
import org.apache.commons.logging.Log
29-
import org.apache.hadoop.security.UserGroupInformation
3027
import org.apache.hadoop.hive.conf.HiveConf
3128
import org.apache.hadoop.hive.shims.ShimLoader
29+
import org.apache.hadoop.security.UserGroupInformation
3230
import org.apache.hive.service.Service.STATE
3331
import org.apache.hive.service.auth.HiveAuthFactory
3432
import org.apache.hive.service.cli._
@@ -50,7 +48,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
5048
addService(sparkSqlSessionManager)
5149
var sparkServiceUGI: UserGroupInformation = null
5250

53-
if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
51+
if (ShimLoader.getHadoopShims.isSecurityEnabled) {
5452
try {
5553
HiveAuthFactory.loginFromKeytab(hiveConf)
5654
sparkServiceUGI = ShimLoader.getHadoopShims.getUGIForConf(hiveConf)
@@ -68,7 +66,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
6866
getInfoType match {
6967
case GetInfoType.CLI_SERVER_NAME => new GetInfoValue("Spark SQL")
7068
case GetInfoType.CLI_DBMS_NAME => new GetInfoValue("Spark SQL")
71-
case GetInfoType.CLI_DBMS_VER => new GetInfoValue(Utils.sparkVersion)
69+
case GetInfoType.CLI_DBMS_VER => new GetInfoValue(hiveContext.sparkContext.version)
7270
case _ => super.getInfo(sessionHandle, getInfoType)
7371
}
7472
}

0 commit comments

Comments (0)