62 changes: 62 additions & 0 deletions common/utils/pom.xml
@@ -84,7 +84,69 @@
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<resources>
<resource>
<directory>${project.basedir}/src/main/resources</directory>
</resource>
<resource>
<!-- Include the properties file to provide the build information. -->
<directory>${project.build.directory}/extra-resources</directory>
<filtering>true</filtering>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>choose-shell-and-script</id>
<phase>validate</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<exportAntProperties>true</exportAntProperties>
<target>
<condition property="shell" value="powershell.exe" else="bash">
<and>
<os family="windows"/>
</and>
</condition>
<condition property="spark-build-info-script" value="spark-build-info.ps1"
else="spark-build-info">
<and>
<os family="windows"/>
</and>
</condition>
<echo>Shell to use for generating spark-version-info.properties file =
${shell}
</echo>
<echo>Script to use for generating spark-version-info.properties file =
${spark-build-info-script}
</echo>
</target>
</configuration>
</execution>
<execution>
<id>generate-spark-build-info</id>
<phase>generate-resources</phase>
<configuration>
<!-- Execute the shell script to generate the spark build information. -->
<target>
<exec executable="${shell}">
<arg value="${project.basedir}/../../build/${spark-build-info-script}"/>
<arg value="${project.build.directory}/extra-resources"/>
<arg value="${project.version}"/>
</exec>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
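
For context, the choose-shell-and-script execution above picks powershell.exe plus spark-build-info.ps1 on Windows and bash plus spark-build-info everywhere else; the generate-spark-build-info execution then runs that script with the output directory and the project version as arguments. Below is a minimal Scala sketch of the kind of output that step produces — the property keys mirror what SparkBuildInfo reads, but the git commands and the object name here are illustrative assumptions, not the script's exact contents:

import java.io.{File, FileOutputStream}
import java.util.Properties
import scala.sys.process.Process

// Hypothetical stand-in for build/spark-build-info: writes
// spark-version-info.properties into the given output directory.
// Keys mirror those consumed by SparkBuildInfo; the git lookups
// are assumptions for illustration, not the script's actual code.
object SparkBuildInfoSketch {
  def main(args: Array[String]): Unit = {
    val outDir = args(0)
    val version = args(1)
    def run(cmd: Seq[String]): String =
      try Process(cmd).!!.trim catch { case _: Exception => "<unknown>" }
    val props = new Properties()
    props.setProperty("version", version)
    props.setProperty("branch", run(Seq("git", "rev-parse", "--abbrev-ref", "HEAD")))
    props.setProperty("revision", run(Seq("git", "rev-parse", "HEAD")))
    props.setProperty("user", System.getProperty("user.name", "<unknown>"))
    props.setProperty("url", run(Seq("git", "config", "--get", "remote.origin.url")))
    props.setProperty("date", java.time.Instant.now.toString)
    props.setProperty("docroot", "https://spark.apache.org/docs/latest")
    val dir = new File(outDir)
    dir.mkdirs()
    val out = new FileOutputStream(new File(dir, "spark-version-info.properties"))
    try props.store(out, null) finally out.close()
  }
}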
65 changes: 65 additions & 0 deletions common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
@@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark

import java.util.Properties

object SparkBuildInfo {

val (
spark_version: String,
spark_branch: String,
spark_revision: String,
spark_build_user: String,
spark_repo_url: String,
spark_build_date: String,
spark_doc_root: String) = {

val resourceStream = Thread.currentThread().getContextClassLoader.
getResourceAsStream("spark-version-info.properties")
if (resourceStream == null) {
throw new SparkException("Could not find spark-version-info.properties")
}

try {
val unknownProp = "<unknown>"
val props = new Properties()
props.load(resourceStream)
(
props.getProperty("version", unknownProp),
props.getProperty("branch", unknownProp),
props.getProperty("revision", unknownProp),
props.getProperty("user", unknownProp),
props.getProperty("url", unknownProp),
props.getProperty("date", unknownProp),
props.getProperty("docroot", unknownProp)
)
} catch {
case e: Exception =>
throw new SparkException("Error loading properties from spark-version-info.properties", e)
} finally {
if (resourceStream != null) {
try {
resourceStream.close()
} catch {
case e: Exception =>
throw new SparkException("Error closing spark build info resource stream", e)
}
}
}
}
}
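
A quick usage sketch for the relocated object (a hypothetical caller, not part of this change) — the fields are loaded once, on first reference to the object:

import org.apache.spark.SparkBuildInfo

// Hypothetical caller: prints a few of the eagerly-loaded build fields.
object PrintBuildInfo {
  def main(args: Array[String]): Unit = {
    println(s"Spark ${SparkBuildInfo.spark_version} " +
      s"(branch ${SparkBuildInfo.spark_branch}, revision ${SparkBuildInfo.spark_revision})")
    println(s"built by ${SparkBuildInfo.spark_build_user} on ${SparkBuildInfo.spark_build_date}")
  }
}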
62 changes: 0 additions & 62 deletions core/pom.xml
@@ -513,69 +513,7 @@
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<resources>
<resource>
<directory>${project.basedir}/src/main/resources</directory>
</resource>
<resource>
<!-- Include the properties file to provide the build information. -->
<directory>${project.build.directory}/extra-resources</directory>
<filtering>true</filtering>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>choose-shell-and-script</id>
<phase>validate</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<exportAntProperties>true</exportAntProperties>
<target>
<condition property="shell" value="powershell.exe" else="bash">
<and>
<os family="windows"/>
</and>
</condition>
<condition property="spark-build-info-script" value="spark-build-info.ps1"
else="spark-build-info">
<and>
<os family="windows"/>
</and>
</condition>
<echo>Shell to use for generating spark-version-info.properties file =
${shell}
</echo>
<echo>Script to use for generating spark-version-info.properties file =
${spark-build-info-script}
</echo>
</target>
</configuration>
</execution>
<execution>
<id>generate-spark-build-info</id>
<phase>generate-resources</phase>
<configuration>
<!-- Execute the shell script to generate the spark build information. -->
<target>
<exec executable="${shell}">
<arg value="${project.basedir}/../build/${spark-build-info-script}"/>
<arg value="${project.build.directory}/extra-resources"/>
<arg value="${project.version}"/>
</exec>
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
66 changes: 8 additions & 58 deletions core/src/main/scala/org/apache/spark/package.scala
@@ -17,8 +17,6 @@

package org.apache

import java.util.Properties

import org.apache.spark.util.VersionUtils

/**
@@ -45,60 +43,12 @@ import org.apache.spark.util.VersionUtils
* level interfaces. These are subject to changes or removal in minor releases.
*/
package object spark {

private object SparkBuildInfo {

val (
spark_version: String,
spark_branch: String,
spark_revision: String,
spark_build_user: String,
spark_repo_url: String,
spark_build_date: String,
spark_doc_root: String) = {

val resourceStream = Thread.currentThread().getContextClassLoader.
getResourceAsStream("spark-version-info.properties")
if (resourceStream == null) {
throw new SparkException("Could not find spark-version-info.properties")
}

try {
val unknownProp = "<unknown>"
val props = new Properties()
props.load(resourceStream)
(
props.getProperty("version", unknownProp),
props.getProperty("branch", unknownProp),
props.getProperty("revision", unknownProp),
props.getProperty("user", unknownProp),
props.getProperty("url", unknownProp),
props.getProperty("date", unknownProp),
props.getProperty("docroot", unknownProp)
)
} catch {
case e: Exception =>
throw new SparkException("Error loading properties from spark-version-info.properties", e)
} finally {
if (resourceStream != null) {
try {
resourceStream.close()
} catch {
case e: Exception =>
throw new SparkException("Error closing spark build info resource stream", e)
}
}
}
}
}

val SPARK_VERSION = SparkBuildInfo.spark_version
val SPARK_VERSION_SHORT = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
val SPARK_BRANCH = SparkBuildInfo.spark_branch
val SPARK_REVISION = SparkBuildInfo.spark_revision
val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
val SPARK_DOC_ROOT = SparkBuildInfo.spark_doc_root
val SPARK_VERSION: String = SparkBuildInfo.spark_version
val SPARK_VERSION_SHORT: String = VersionUtils.shortVersion(SparkBuildInfo.spark_version)
val SPARK_BRANCH: String = SparkBuildInfo.spark_branch
val SPARK_REVISION: String = SparkBuildInfo.spark_revision
val SPARK_BUILD_USER: String = SparkBuildInfo.spark_build_user
val SPARK_REPO_URL: String = SparkBuildInfo.spark_repo_url
val SPARK_BUILD_DATE: String = SparkBuildInfo.spark_build_date
val SPARK_DOC_ROOT: String = SparkBuildInfo.spark_doc_root
}
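
The package-object vals keep their existing names and now carry explicit result types, so existing call sites compile unchanged; only the backing object has moved to common/utils. A small compatibility sketch (hypothetical caller):

import org.apache.spark.{SPARK_VERSION, SPARK_VERSION_SHORT}

// Existing call sites keep working: the package-object vals still
// resolve, now delegating to org.apache.spark.SparkBuildInfo.
object VersionCheck {
  def main(args: Array[String]): Unit = {
    println(s"full: $SPARK_VERSION, short: $SPARK_VERSION_SHORT")
  }
}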

39 changes: 23 additions & 16 deletions project/SparkBuild.scala
@@ -425,6 +425,8 @@ object SparkBuild extends PomBuild {
}

/* Generate and pick the spark build info from extra-resources */
enable(CommonUtils.settings)(commonUtils)

enable(Core.settings)(core)

/* Unsafe settings */
@@ -626,27 +628,13 @@ object SparkParallelTestGrouping {
)
}

object Core {
object CommonUtils {
import scala.sys.process.Process
import BuildCommons.protoVersion
def buildenv = Process(Seq("uname")).!!.trim.replaceFirst("[^A-Za-z0-9].*", "").toLowerCase
def bashpath = Process(Seq("where", "bash")).!!.split("[\r\n]+").head.replace('\\', '/')
lazy val settings = Seq(
// Setting version for the protobuf compiler. This has to be propagated to every sub-project
// even if the project is not using it.
PB.protocVersion := BuildCommons.protoVersion,
// For some reason the resolution from the imported Maven build does not work for some
// of these dependencies that we need to shade later on.
libraryDependencies ++= {
Seq(
"com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
)
},
(Compile / PB.targets) := Seq(
PB.gens.java -> (Compile / sourceManaged).value
),
(Compile / resourceGenerators) += Def.task {
val buildScript = baseDirectory.value + "/../build/spark-build-info"
val buildScript = baseDirectory.value + "/../../build/spark-build-info"
val targetDir = baseDirectory.value + "/target/extra-resources/"
// support Windows build under cygwin/mingw64, etc
val bash = buildenv match {
@@ -658,6 +646,25 @@
val propsFile = baseDirectory.value / "target" / "extra-resources" / "spark-version-info.properties"
Seq(propsFile)
}.taskValue
)
}

object Core {
import BuildCommons.protoVersion
lazy val settings = Seq(
// Setting version for the protobuf compiler. This has to be propagated to every sub-project
// even if the project is not using it.
PB.protocVersion := BuildCommons.protoVersion,
// For some reason the resolution from the imported Maven build does not work for some
// of these dependencies that we need to shade later on.
libraryDependencies ++= {
Seq(
"com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"
)
},
(Compile / PB.targets) := Seq(
PB.gens.java -> (Compile / sourceManaged).value
)
) ++ {
val sparkProtocExecPath = sys.props.get("spark.protoc.executable.path")
if (sparkProtocExecPath.isDefined) {
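
For readers less familiar with sbt, the CommonUtils.settings shown above hook into Compile / resourceGenerators, so the properties file is regenerated on each build and tracked as a managed resource. A generic, minimal build.sbt sketch of that pattern (illustrative only, not the PR's exact task):

// Sketch of the resource-generator pattern used above: write one file
// under resourceManaged and return it so sbt packages and tracks it.
Compile / resourceGenerators += Def.task {
  val file = (Compile / resourceManaged).value / "build-info.properties"
  IO.write(file, s"version=${version.value}\n")
  Seq(file)
}.taskValue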