diff --git a/common/utils/pom.xml b/common/utils/pom.xml
index 20abad212432c..2f2fee0cf41e8 100644
--- a/common/utils/pom.xml
+++ b/common/utils/pom.xml
@@ -84,7 +84,69 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
+    <resources>
+      <resource>
+        <directory>${project.basedir}/src/main/resources</directory>
+      </resource>
+      <resource>
+        <!-- Include the properties file to provide the build information. -->
+        <directory>${project.build.directory}/extra-resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>choose-shell-and-script</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <exportAntProperties>true</exportAntProperties>
+              <target>
+                <condition property="shell" value="powershell.exe" else="bash">
+                  <and>
+                    <os family="windows"/>
+                    <not>
+                      <available file="bash" filepath="${env.PATH}"/>
+                    </not>
+                  </and>
+                </condition>
+                <condition property="spark-build-info-script" value="spark-build-info.ps1" else="spark-build-info">
+                  <and>
+                    <os family="windows"/>
+                    <not>
+                      <available file="bash" filepath="${env.PATH}"/>
+                    </not>
+                  </and>
+                </condition>
+                <echo>Shell to use for generating spark-version-info.properties file =
+                  ${shell}</echo>
+                <echo>Script to use for generating spark-version-info.properties file =
+                  ${spark-build-info-script}</echo>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>generate-spark-build-info</id>
+            <phase>generate-resources</phase>
+            <configuration>
+              <!-- Execute the shell script to generate the spark build information. -->
+              <target>
+                <exec executable="${shell}">
+                  <arg value="${project.basedir}/../../build/${spark-build-info-script}"/>
+                  <arg value="${project.build.directory}/extra-resources"/>
+                  <arg value="${project.version}"/>
+                </exec>
+              </target>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
   </build>
 </project>
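For context, the antrun `exec` above runs `build/spark-build-info` (or the PowerShell variant on Windows) with the output directory and project version as its two arguments, and the filtered `<resource>` block then packages the resulting `spark-version-info.properties` into the jar. A rough Scala sketch of what such a generator does, under the assumption that `git` is on the PATH (the real generator is the shell/PowerShell script pair, not this hypothetical `SparkBuildInfoGen`):

```scala
import java.nio.file.{Files, Paths}
import java.time.Instant
import scala.sys.process._
import scala.util.Try

// Hypothetical stand-in for build/spark-build-info: writes the property keys
// that SparkBuildInfo reads. args mirror the two <arg> values passed by the
// generate-spark-build-info execution: output directory, then version.
object SparkBuildInfoGen {
  def main(args: Array[String]): Unit = {
    val Array(outDir, version) = args
    // Each git call can fail outside a checkout; fall back to an empty value.
    def git(cmd: String): String = Try(cmd.!!.trim).getOrElse("")
    val props =
      s"""version=$version
         |user=${sys.props.getOrElse("user.name", "")}
         |revision=${git("git rev-parse HEAD")}
         |branch=${git("git rev-parse --abbrev-ref HEAD")}
         |date=${Instant.now()}
         |url=${git("git config --get remote.origin.url")}
         |""".stripMargin
    Files.createDirectories(Paths.get(outDir))
    Files.write(Paths.get(outDir, "spark-version-info.properties"), props.getBytes("UTF-8"))
  }
}
```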
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
new file mode 100644
index 0000000000000..23f671f9d7647
--- /dev/null
+++ b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark
+
+import java.util.Properties
+
+object SparkBuildInfo {
+
+  val (
+    spark_version: String,
+    spark_branch: String,
+    spark_revision: String,
+    spark_build_user: String,
+    spark_repo_url: String,
+    spark_build_date: String,
+    spark_doc_root: String) = {
+
+    val resourceStream = Thread.currentThread().getContextClassLoader.
+      getResourceAsStream("spark-version-info.properties")
+    if (resourceStream == null) {
+      throw new SparkException("Could not find spark-version-info.properties")
+    }
+
+    try {
+      val unknownProp = "<unknown>"
+      val props = new Properties()
+      props.load(resourceStream)
+      (
+        props.getProperty("version", unknownProp),
+        props.getProperty("branch", unknownProp),
+        props.getProperty("revision", unknownProp),
+        props.getProperty("user", unknownProp),
+        props.getProperty("url", unknownProp),
+        props.getProperty("date", unknownProp),
+        props.getProperty("docroot", unknownProp)
+      )
+    } catch {
+      case e: Exception =>
+        throw new SparkException("Error loading properties from spark-version-info.properties", e)
+    } finally {
+      if (resourceStream != null) {
+        try {
+          resourceStream.close()
+        } catch {
+          case e: Exception =>
+            throw new SparkException("Error closing spark build info resource stream", e)
+        }
+      }
+    }
+  }
+}
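Since the object now lives in `common/utils`, any module with that jar on the classpath can read the build information directly. A small, hypothetical usage sketch (not part of this change):

```scala
import org.apache.spark.SparkBuildInfo

object ShowBuildInfo {
  def main(args: Array[String]): Unit = {
    // All seven fields are initialized together by the one-time tuple load above,
    // so spark-version-info.properties is read at most once per classloader.
    println(s"Spark ${SparkBuildInfo.spark_version} " +
      s"(branch ${SparkBuildInfo.spark_branch}, revision ${SparkBuildInfo.spark_revision})")
    println(s"built by ${SparkBuildInfo.spark_build_user} on ${SparkBuildInfo.spark_build_date}")
  }
}
```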
diff --git a/core/pom.xml b/core/pom.xml
index 6e3552c90c356..1d552a65f7b6d 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -513,69 +513,7 @@
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
     <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    <resources>
-      <resource>
-        <directory>${project.basedir}/src/main/resources</directory>
-      </resource>
-      <resource>
-        <!-- Include the properties file to provide the build information. -->
-        <directory>${project.build.directory}/extra-resources</directory>
-        <filtering>true</filtering>
-      </resource>
-    </resources>
     <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>choose-shell-and-script</id>
-            <phase>validate</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <exportAntProperties>true</exportAntProperties>
-              <target>
-                <condition property="shell" value="powershell.exe" else="bash">
-                  <and>
-                    <os family="windows"/>
-                    <not>
-                      <available file="bash" filepath="${env.PATH}"/>
-                    </not>
-                  </and>
-                </condition>
-                <condition property="spark-build-info-script" value="spark-build-info.ps1" else="spark-build-info">
-                  <and>
-                    <os family="windows"/>
-                    <not>
-                      <available file="bash" filepath="${env.PATH}"/>
-                    </not>
-                  </and>
-                </condition>
-                <echo>Shell to use for generating spark-version-info.properties file =
-                  ${shell}</echo>
-                <echo>Script to use for generating spark-version-info.properties file =
-                  ${spark-build-info-script}</echo>
-              </target>
-            </configuration>
-          </execution>
-          <execution>
-            <id>generate-spark-build-info</id>
-            <phase>generate-resources</phase>
-            <configuration>
-              <!-- Execute the shell script to generate the spark build information. -->
-              <target>
-                <exec executable="${shell}">
-                  <arg value="${project.basedir}/../build/${spark-build-info-script}"/>
-                  <arg value="${project.build.directory}/extra-resources"/>
-                  <arg value="${project.version}"/>
-                </exec>
-              </target>
-            </configuration>
-            <goals>
-              <goal>run</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-dependency-plugin</artifactId>
diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index 92cab14294f21..5b512fbcda8ba 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -17,8 +17,6 @@
 
 package org.apache
 
-import java.util.Properties
-
 import org.apache.spark.util.VersionUtils
 
 /**
@@ -45,60 +43,12 @@ import org.apache.spark.util.VersionUtils
  * level interfaces. These are subject to changes or removal in minor releases.
  */
 package object spark {
-
-  private object SparkBuildInfo {
-
-    val (
-      spark_version: String,
-      spark_branch: String,
-      spark_revision: String,
-      spark_build_user: String,
-      spark_repo_url: String,
-      spark_build_date: String,
-      spark_doc_root: String) = {
-
-      val resourceStream = Thread.currentThread().getContextClassLoader.
-        getResourceAsStream("spark-version-info.properties")
-      if (resourceStream == null) {
-        throw new SparkException("Could not find spark-version-info.properties")
-      }
-
-      try {
-        val unknownProp = "<unknown>"
-        val props = new Properties()
-        props.load(resourceStream)
-        (
-          props.getProperty("version", unknownProp),
-          props.getProperty("branch", unknownProp),
-          props.getProperty("revision", unknownProp),
-          props.getProperty("user", unknownProp),
-          props.getProperty("url", unknownProp),
-          props.getProperty("date", unknownProp),
-          props.getProperty("docroot", unknownProp)
-        )
-      } catch {
-        case e: Exception =>
-          throw new SparkException("Error loading properties from spark-version-info.properties", e)
-      } finally {
-        if (resourceStream != null) {
-          try {
-            resourceStream.close()
-          } catch {
-            case e: Exception =>
-              throw new SparkException("Error closing spark build info resource stream", e)
-          }
-        }
-      }
-    }
-  }
-
-  val SPARK_VERSION = SparkBuildInfo.spark_version
-  val SPARK_VERSION_SHORT = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
-  val SPARK_BRANCH = SparkBuildInfo.spark_branch
-  val SPARK_REVISION = SparkBuildInfo.spark_revision
-  val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
-  val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
-  val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
-  val SPARK_DOC_ROOT = SparkBuildInfo.spark_doc_root
+  val SPARK_VERSION: String = SparkBuildInfo.spark_version
+  val SPARK_VERSION_SHORT: String = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
+  val SPARK_BRANCH: String = SparkBuildInfo.spark_branch
+  val SPARK_REVISION: String = SparkBuildInfo.spark_revision
+  val SPARK_BUILD_USER: String = SparkBuildInfo.spark_build_user
+  val SPARK_REPO_URL: String = SparkBuildInfo.spark_repo_url
+  val SPARK_BUILD_DATE: String = SparkBuildInfo.spark_build_date
+  val SPARK_DOC_ROOT: String = SparkBuildInfo.spark_doc_root
 }
-
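The package-level constants keep their names (now with explicit `String` types), so existing call sites such as `org.apache.spark.SPARK_VERSION` compile unchanged. For illustration, `SPARK_VERSION_SHORT` trims pre-release suffixes from the full version; a hedged sketch of the behavior this relies on, assuming `VersionUtils.shortVersion` extracts the leading `major.minor.patch` triple:

```scala
// Hypothetical stand-in for org.apache.spark.util.VersionUtils.shortVersion.
def shortVersion(sparkVersion: String): String = {
  val majorMinorPatch = """^(\d+)\.(\d+)\.(\d+)(.*)$""".r
  sparkVersion match {
    case majorMinorPatch(major, minor, patch, _) => s"$major.$minor.$patch"
    case _ => throw new IllegalArgumentException(
      s"Cannot extract a major.minor.patch version from '$sparkVersion'")
  }
}

assert(shortVersion("4.0.0-SNAPSHOT") == "4.0.0")
```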
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index cc27686b6b335..e585d5dd2b25c 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -425,6 +425,8 @@ object SparkBuild extends PomBuild {
   }
 
   /* Generate and pick the spark build info from extra-resources */
+  enable(CommonUtils.settings)(commonUtils)
+
   enable(Core.settings)(core)
 
   /* Unsafe settings */
@@ -626,27 +628,13 @@ object SparkParallelTestGrouping {
   )
 }
 
-object Core {
+object CommonUtils {
   import scala.sys.process.Process
-  import BuildCommons.protoVersion
   def buildenv = Process(Seq("uname")).!!.trim.replaceFirst("[^A-Za-z0-9].*", "").toLowerCase
   def bashpath = Process(Seq("where", "bash")).!!.split("[\r\n]+").head.replace('\\', '/')
   lazy val settings = Seq(
-    // Setting version for the protobuf compiler. This has to be propagated to every sub-project
-    // even if the project is not using it.
-    PB.protocVersion := BuildCommons.protoVersion,
-    // For some reason the resolution from the imported Maven build does not work for some
-    // of these dependencies that we need to shade later on.
-    libraryDependencies ++= {
-      Seq(
-        "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
-      )
-    },
-    (Compile / PB.targets) := Seq(
-      PB.gens.java -> (Compile / sourceManaged).value
-    ),
     (Compile / resourceGenerators) += Def.task {
-      val buildScript = baseDirectory.value + "/../build/spark-build-info"
+      val buildScript = baseDirectory.value + "/../../build/spark-build-info"
       val targetDir = baseDirectory.value + "/target/extra-resources/"
       // support Windows build under cygwin/mingw64, etc
       val bash = buildenv match {
@@ -658,6 +646,25 @@ object Core {
       val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
       Seq(propsFile)
     }.taskValue
+  )
+}
+
+object Core {
+  import BuildCommons.protoVersion
+  lazy val settings = Seq(
+    // Setting version for the protobuf compiler. This has to be propagated to every sub-project
+    // even if the project is not using it.
+    PB.protocVersion := BuildCommons.protoVersion,
+    // For some reason the resolution from the imported Maven build does not work for some
+    // of these dependencies that we need to shade later on.
+    libraryDependencies ++= {
+      Seq(
+        "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
+      )
+    },
+    (Compile / PB.targets) := Seq(
+      PB.gens.java -> (Compile / sourceManaged).value
+    )
   ) ++ {
     val sparkProtocExecPath = sys.props.get("spark.protoc.executable.path")
    if (sparkProtocExecPath.isDefined) {
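The sbt side mirrors the Maven move: the `resourceGenerators` task, together with the `buildenv`/`bashpath` helpers it needs to pick a shell on Windows, now lives in `CommonUtils.settings` wired to the `commonUtils` project, with the script path one directory deeper (`/../../build/`), while the protobuf settings stay behind in `Core`. As a minimal illustration of the same sbt pattern, a self-contained `build.sbt` fragment (hypothetical project; it writes the file directly instead of shelling out to `build/spark-build-info`):

```scala
// Regenerate a version-info resource on every compile/package so the jar
// always carries current build metadata.
Compile / resourceGenerators += Def.task {
  val out = (Compile / resourceManaged).value / "spark-version-info.properties"
  IO.write(out, s"version=${version.value}\ndate=${java.time.Instant.now()}\n")
  Seq(out)  // every returned file is included in the classpath resources
}.taskValue
```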