pom.xml: 73 additions & 0 deletions
@@ -125,6 +125,7 @@
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm</maven.build.timestamp.format>
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>1.8</compileSource>
+<releaseTarget>8</releaseTarget>
<java.min.version>${compileSource}</java.min.version>
<!-- override on command line to have generated LICENSE files include
diagnostic info for verifying notice requirements -->
@@ -173,6 +174,44 @@
<!-- Default to redirecting test logs
This only applies to the java tests, it's not obvious how get scalatest to do this. -->
<test.output.tofile>true</test.output.tofile>
+<jacocoArgLine/>
+<argLine/>
+<spark-ut.args>-Xmx1536m -XX:ReservedCodeCacheSize=512m</spark-ut.args>
+<spark-ut.argLine>${argLine} ${spark-ut.args}</spark-ut.argLine>
+<!-- To Run IT Tests with a particular Xmx Value use -Dfailsafe.Xmx=XXXg -->
+<failsafe.Xmx>4g</failsafe.Xmx>
+<spark-it.args>-enableassertions -Xmx${failsafe.Xmx}
+-Djava.security.egd=file:/dev/./urandom -XX:+CMSClassUnloadingEnabled
+-verbose:gc -XX:+PrintCommandLineFlags -XX:+PrintFlagsFinal -XX:+IgnoreUnrecognizedVMOptions</spark-it.args>
+<spark-it.argLine>${argLine} ${spark-it.args}</spark-it.argLine>
+<hbase-surefire.argLine>-Djava.security.manager=allow</hbase-surefire.argLine>
+<!-- Keep in sync with jvm flags in bin/hbase in main repo; Copied from there! -->
+<hbase-surefire.jdk11.flags>-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true
[Review thread on this line]
Contributor Author: Do we need jdk11 any more? I think we should drop this altogether and be only on jdk17?
Contributor Author: Just double checked, branch-2.6 still has jdk11.
+--add-modules jdk.unsupported
+--add-opens java.base/java.io=ALL-UNNAMED
+--add-opens java.base/java.nio=ALL-UNNAMED
+--add-opens java.base/sun.nio.ch=ALL-UNNAMED
+--add-opens java.base/java.lang=ALL-UNNAMED
+--add-opens java.base/jdk.internal.ref=ALL-UNNAMED
+--add-opens java.base/java.lang.reflect=ALL-UNNAMED
+--add-opens java.base/java.util=ALL-UNNAMED
+--add-opens java.base/java.util.concurrent=ALL-UNNAMED
+--add-exports java.base/jdk.internal.misc=ALL-UNNAMED
+--add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED
+--add-exports java.base/sun.net.dns=ALL-UNNAMED
+--add-exports java.base/sun.net.util=ALL-UNNAMED</hbase-surefire.jdk11.flags>
+<hbase-surefire.jdk17.flags/>
+<!-- Keep in sync with JavaModuleOptions.java from spark codebase;
+Copied from there and next removed all duplicate hbase jvm flags and retained remaining ones -->
+<spark-surefire.jdk11.flags>--add-opens=java.base/java.lang.invoke=ALL-UNNAMED
+--add-opens=java.base/java.net=ALL-UNNAMED
+--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
+--add-opens=java.base/sun.nio.cs=ALL-UNNAMED
+--add-opens=java.base/sun.security.action=ALL-UNNAMED
+--add-opens=java.base/sun.util.calendar=ALL-UNNAMED
+-Djdk.reflect.useDirectMethodHandle=false
+-Dio.netty.tryReflectionSetAccessible=true</spark-surefire.jdk11.flags>
+<spark-surefire.jdk17.flags/>
</properties>
<dependencyManagement>
<dependencies>
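Side note for reviewers on the `--add-opens`/`--add-exports` lists above: from JDK 9 the module system restricts deep reflection into `java.base`, and JDK 17 enforces that strictly, so code calling `setAccessible(true)` on JDK internals fails unless the target package is opened. A minimal, hypothetical sketch of that failure mode (the `ArrayList.elementData` field is just an illustration, not something these tests specifically touch):

```java
import java.lang.reflect.Field;

// Hypothetical repro of the failure the --add-opens flags suppress.
public class AddOpensDemo {
    public static void main(String[] args) throws Exception {
        Field f = java.util.ArrayList.class.getDeclaredField("elementData");
        // On JDK 17 this throws java.lang.reflect.InaccessibleObjectException
        // unless the JVM was started with:
        //   --add-opens java.base/java.util=ALL-UNNAMED
        f.setAccessible(true);
        System.out.println("deep reflection allowed: " + f);
    }
}
```

On JDK 11 the same call only produces an illegal-access warning, which is why these flags can live in a shared `jdk11.flags` property and simply stay on under JDK 17.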
@@ -935,6 +974,40 @@
</plugins>
</build>
</profile>
+<profile>
+<id>build-with-jdk8</id>
+<activation>
+<jdk>1.8</jdk>
+</activation>
+<properties>
+<maven.compiler.source>${compileSource}</maven.compiler.source>
+<maven.compiler.target>${compileSource}</maven.compiler.target>
+</properties>
+</profile>
+<profile>
+<id>build-with-jdk11</id>
+<activation>
+<jdk>[11,)</jdk>
+</activation>
+<properties>
+<maven.compiler.release>${releaseTarget}</maven.compiler.release>
+<argLine>${hbase-surefire.jdk11.flags} ${spark-surefire.jdk11.flags}
+${hbase-surefire.argLine}
+${jacocoArgLine}</argLine>
+</properties>
+</profile>
+<profile>
+<id>build-with-jdk17</id>
+<activation>
+<jdk>[17,)</jdk>
+</activation>
+<properties>
+<argLine>${hbase-surefire.jdk11.flags} ${spark-surefire.jdk11.flags}
+${hbase-surefire.jdk17.flags} ${spark-surefire.jdk17.flags}
+${hbase-surefire.argLine}
+${jacocoArgLine}</argLine>
+</properties>
+</profile>
<profile>
<!-- Use Mac x64 version of protoc for Apple Silicon (aarch64) Macs -->
<id>osx-aarch64</id>
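One observation on the compiler settings: the jdk8 profile keeps plain `source`/`target`, while the jdk11+ profile switches to `<maven.compiler.release>`. The difference matters because `--release 8` also validates API usage against the Java 8 class library, which `source`/`target` alone never did. A hedged sketch (hypothetical example, not code from this repo):

```java
// Compiled with --release 8 (what <maven.compiler.release>8</...> passes to
// javac), the isBlank() call below is rejected at compile time: the method
// only exists since Java 11. With -source 8 -target 8 on a JDK 11+ toolchain
// it would compile fine and then fail on a real Java 8 runtime.
public class ReleaseFlagDemo {
    public static void main(String[] args) {
        String s = "  ";
        System.out.println(s.isBlank());          // fails under --release 8
        System.out.println(s.trim().isEmpty());   // Java 8-safe equivalent
    }
}
```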
spark/hbase-spark-it/pom.xml: 1 addition & 5 deletions
@@ -34,8 +34,6 @@
<!-- Test inclusion patterns used by failsafe configuration -->
<unittest.include>**/Test*.java</unittest.include>
<integrationtest.include>**/IntegrationTest*.java</integrationtest.include>
-<!-- To Run Tests with a particular Xmx Value use -Dfailsafe.Xmx=XXXg -->
-<failsafe.Xmx>4g</failsafe.Xmx>
<!-- To run a single integration test, use -Dit.test=IntegrationTestXXX -->
</properties>

@@ -278,9 +276,7 @@
<!-- TODO: failsafe does timeout, but verify does not fail the build because of the timeout.
I believe it is a failsafe bug, we may consider using surefire -->
<forkedProcessTimeoutInSeconds>1800</forkedProcessTimeoutInSeconds>
-<argLine>-enableassertions -Xmx${failsafe.Xmx}
--Djava.security.egd=file:/dev/./urandom -XX:+CMSClassUnloadingEnabled
--verbose:gc -XX:+PrintCommandLineFlags -XX:+PrintFlagsFinal</argLine>
+<argLine>${spark-it.argLine}</argLine>
</configuration>
</plugin>
<plugin>
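The integration-test JVM arguments now resolve through the parent pom's `${spark-it.argLine}`, which among other things keeps `-enableassertions` in front of the heap settings. A quick, hypothetical reminder of what that flag changes for test code:

```java
// Sketch: `assert` statements are no-ops unless the JVM runs with
// -enableassertions (or -ea), as ${spark-it.argLine} arranges.
public class AssertDemo {
    public static void main(String[] args) {
        int regionServers = 0; // hypothetical bad test input
        // With -enableassertions this throws AssertionError immediately;
        // without it, the check silently disappears.
        assert regionServers > 0 : "expected at least one region server";
        System.out.println("starting cluster with " + regionServers);
    }
}
```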
spark/hbase-spark/pom.xml: 1 addition & 24 deletions
@@ -438,27 +438,6 @@
</activation>
<build>
<plugins>
-<!--This is ugly but saves having to mess with profiles.
-This plugin uses groovy (yuck) just to set a single
-variable, target.jvm, dependent on what the value of
-scala version is.-->
-<plugin>
-<groupId>org.codehaus.gmaven</groupId>
-<artifactId>gmaven-plugin</artifactId>
-<version>1.5</version>
-<executions>
-<execution>
-<goals>
-<goal>execute</goal>
-</goals>
-<phase>validate</phase>
-<configuration>
-<source><![CDATA[pom.properties['target.jvm'] =
-pom.properties['scala.version'].startsWith('2.10')? '': '-target:jvm-1.8';]]></source>
-</configuration>
-</execution>
-</executions>
-</plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
@@ -468,8 +447,6 @@
<scalaVersion>${scala.version}</scalaVersion>
<args>
<arg>-feature</arg>
-<!--The target.jvm variable gets set above by the groovy
-snippet in the gmaven-plugin.-->
<arg>${target.jvm}</arg>
</args>
<source>${compileSource}</source>
@@ -511,7 +488,7 @@
</goals>
<phase>test</phase>
<configuration>
-<argLine>-Xmx1536m -XX:ReservedCodeCacheSize=512m</argLine>
+<argLine>${spark-ut.argLine}</argLine>
<parallel>false</parallel>
</configuration>
</execution>
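With the scalatest fork's flags now coming from `${spark-ut.argLine}` in the root pom, a quick way to confirm what a forked test JVM actually received is to dump its input arguments. A throwaway probe one could paste into any test (hypothetical, not part of this change):

```java
import java.lang.management.ManagementFactory;

// Prints the JVM options the current (forked) process was started with,
// e.g. -Xmx1536m and -XX:ReservedCodeCacheSize=512m from spark-ut.args.
public class JvmArgsProbe {
    public static void main(String[] args) {
        ManagementFactory.getRuntimeMXBean().getInputArguments()
                .forEach(System.out::println);
    }
}
```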
spark/pom.xml: 1 addition & 0 deletions
@@ -49,6 +49,7 @@
Please take caution when this version is modified -->
<scala.version>2.12.15</scala.version>
<scala.binary.version>2.12</scala.binary.version>
+<target.jvm>-target:jvm-1.8</target.jvm>
</properties>

<dependencyManagement>
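Since `<target.jvm>` is now pinned statically to `-target:jvm-1.8` instead of being derived by the removed groovy snippet, one sanity check is to read the version header of a compiled class file; Java 8 bytecode carries major version 52. A hypothetical helper (pass it the path to any `.class` file produced by the build):

```java
import java.io.DataInputStream;
import java.io.FileInputStream;

// Reads the class-file header: u4 magic (0xCAFEBABE), u2 minor, u2 major.
// Major version 52 corresponds to Java 8, which -target:jvm-1.8 should emit.
public class ClassVersionCheck {
    public static void main(String[] args) throws Exception {
        try (DataInputStream in = new DataInputStream(new FileInputStream(args[0]))) {
            int magic = in.readInt();
            int minor = in.readUnsignedShort();
            int major = in.readUnsignedShort();
            System.out.printf("magic=%08X major=%d minor=%d%n", magic, major, minor);
        }
    }
}
```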