From fd7abc4607595a2957fea0bb864d9c957902053c Mon Sep 17 00:00:00 2001 From: Tom White Date: Mon, 7 Oct 2019 17:25:09 +0100 Subject: [PATCH] Build and test on Java 11 (#6119) * Use Spark 2.4 with Scala 2.12 * Use adam-core-spark2_2.12:0.28.0 * Serializable identityFunction * Fix 'Could not serialize lambda' for AssemblyRegionWalkerSpark * Suppress deprecation warnings from closeQuietly in Commons IO * Try testing on Java 11 on Travis * Don't use docker for Java 11 * Pass in scala version in non-docker test * Upgrade mockito to version that works with Java 11 * Skip PileupSparkIntegrationTest#testFeaturesPileupHdfs which fails on Java 11 due a Spark error that is not fixed until Spark 3. --- .travis.yml | 30 +++++++++++-------- build.gradle | 29 +++++++++++------- .../hellbender/engine/FeatureDataSource.java | 6 ++-- .../spark/AssemblyRegionWalkerSpark.java | 3 +- .../spark/datasources/ReadsSparkSink.java | 4 +-- .../ReferenceTwoBitSparkSource.java | 11 +++++-- .../hellbender/utils/Utils.java | 13 ++++++++ .../hellbender/utils/io/IOUtils.java | 1 + .../BDGAlignmentRecordToGATKReadAdapter.java | 7 ++--- ...GATKReadToBDGAlignmentRecordConverter.java | 12 ++++---- .../utils/runtime/CapturedStreamOutput.java | 1 + .../utils/runtime/ProcessController.java | 2 ++ .../runtime/StreamingProcessController.java | 1 + .../spark/PileupSparkIntegrationTest.java | 6 +++- .../utils/read/GATKReadAdaptersUnitTest.java | 2 -- 15 files changed, 84 insertions(+), 44 deletions(-) diff --git a/.travis.yml b/.travis.yml index f2928000b39..2dae44be384 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,16 +9,16 @@ jdk: - openjdk8 env: matrix: - - TEST_TYPE=cloud UPLOAD=true TESTS_REQUIRE_GCLOUD=true - - TEST_TYPE=integration TEST_DOCKER=true TEST_VERBOSITY=minimal - - TEST_TYPE=unit TEST_DOCKER=true TEST_VERBOSITY=minimal - - TEST_TYPE=variantcalling TEST_DOCKER=true TEST_VERBOSITY=minimal - - TEST_TYPE=python TEST_DOCKER=true TEST_VERBOSITY=minimal - - RUN_CNV_GERMLINE_COHORT_WDL=true TESTS_REQUIRE_GCLOUD=true - - RUN_CNV_GERMLINE_CASE_WDL=true TESTS_REQUIRE_GCLOUD=true - - RUN_CNV_SOMATIC_WDL=true TESTS_REQUIRE_GCLOUD=true - - RUN_M2_WDL=true TESTS_REQUIRE_GCLOUD=true - - RUN_CNN_WDL=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 TEST_TYPE=cloud UPLOAD=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 TEST_TYPE=integration TEST_DOCKER=true TEST_VERBOSITY=minimal + - SCALA_VERSION=2.11 TEST_TYPE=unit TEST_DOCKER=true TEST_VERBOSITY=minimal + - SCALA_VERSION=2.11 TEST_TYPE=variantcalling TEST_DOCKER=true TEST_VERBOSITY=minimal + - SCALA_VERSION=2.11 TEST_TYPE=python TEST_DOCKER=true TEST_VERBOSITY=minimal + - SCALA_VERSION=2.11 RUN_CNV_GERMLINE_COHORT_WDL=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 RUN_CNV_GERMLINE_CASE_WDL=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 RUN_CNV_SOMATIC_WDL=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 RUN_M2_WDL=true TESTS_REQUIRE_GCLOUD=true + - SCALA_VERSION=2.11 RUN_CNN_WDL=true TESTS_REQUIRE_GCLOUD=true global: #gradle needs this - TERM=dumb @@ -47,7 +47,11 @@ matrix: fast_finish: true include: - jdk: oraclejdk8 - env: TEST_TYPE=integration TEST_VERBOSITY=minimal TEST_REQUIRE_GCLOUD=true + env: SCALA_VERSION=2.11 TEST_TYPE=integration TEST_VERBOSITY=minimal TEST_REQUIRE_GCLOUD=true + - jdk: openjdk11 + env: SCALA_VERSION=2.12 TEST_TYPE=integration TEST_VERBOSITY=minimal + - jdk: openjdk11 + env: SCALA_VERSION=2.12 TEST_TYPE=unit TEST_VERBOSITY=minimal before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock - rm -fr $HOME/.gradle/caches/*/plugin-resolution/ @@ -162,13 +166,13 @@ script: sudo mkdir -p build/reports/; sudo chmod -R a+w build/reports/; cp scripts/docker/dockertest.gradle .; - sudo docker run -v $(pwd):/gatkCloneMountPoint:cached -v $(pwd)/testJars:/jars:cached --rm -e "TEST_VERBOSITY=minimal" -e "TEST_TYPE=${TEST_TYPE}" -t broadinstitute/gatk:${DOCKER_TAG} bash --init-file /gatk/gatkenv.rc /root/run_unit_tests.sh; + sudo docker run -v $(pwd):/gatkCloneMountPoint:cached -v $(pwd)/testJars:/jars:cached --rm -e "scala.version=${SCALA_VERSION}" -e "TEST_VERBOSITY=minimal" -e "TEST_TYPE=${TEST_TYPE}" -t broadinstitute/gatk:${DOCKER_TAG} bash --init-file /gatk/gatkenv.rc /root/run_unit_tests.sh; TEST_EXIT_VALUE=$?; sudo mkdir build/reports/tests/test && sudo cp -rp build/reports/tests/testOnPackagedReleaseJar/* build/reports/tests/test && sudo rm -r build/reports/tests/testOnPackagedReleaseJar; $( exit ${TEST_EXIT_VALUE} ); else ./gatk PrintReads -I src/test/resources/NA12878.chr17_69k_70k.dictFix.bam -O output.bam; - travis_wait 50 ./gradlew jacocoTestReport; + travis_wait 50 ./gradlew -Dscala.version=${SCALA_VERSION} jacocoTestReport; fi; # This creates and uploads the gatk zip file to the nightly build bucket, only keeping the 10 newest entries # This also constructs the Docker image and uploads it to https://cloud.docker.com/u/broadinstitute/repository/docker/broadinstitute/gatk-nightly/ diff --git a/build.gradle b/build.gradle index 346e4dabe44..6ab5b33ad74 100644 --- a/build.gradle +++ b/build.gradle @@ -57,11 +57,11 @@ repositories { mavenLocal() } -final requiredJavaVersion = "8" final htsjdkVersion = System.getProperty('htsjdk.version','2.20.3') final picardVersion = System.getProperty('picard.version','2.20.7') final barclayVersion = System.getProperty('barclay.version','2.1.0') final sparkVersion = System.getProperty('spark.version', '2.4.3') +final scalaVersion = System.getProperty('scala.version', '2.11') final hadoopVersion = System.getProperty('hadoop.version', '2.8.2') final disqVersion = System.getProperty('disq.version','0.3.3') final genomicsdbVersion = System.getProperty('genomicsdb.version','1.1.2') @@ -131,15 +131,23 @@ def looksLikeWereInAGitRepository(){ file(".git").isDirectory() || (file(".git").exists() && file(".git").text.startsWith("gitdir")) } -// Ensure that we have the right JDK version, a clone of the git repository, and resolve any required git-lfs +// Ensure that we have a clone of the git repository, and resolve any required git-lfs // resource files that are needed to run the build but are still lfs stub files. -def ensureBuildPrerequisites(requiredJavaVersion, largeResourcesFolder, buildPrerequisitesMessage) { - // Make sure we can get a ToolProvider class loader. If not we may have just a JRE, or a JDK from the future. - if (ToolProvider.getSystemToolClassLoader() == null) { +def ensureBuildPrerequisites(largeResourcesFolder, buildPrerequisitesMessage) { + if (!JavaVersion.current().isJava8Compatible()) { + throw new GradleException( + "Java 8 or later is required to build GATK, but ${JavaVersion.current()} was found. " + + "$buildPrerequisitesMessage") + } + // Make sure we can get a ToolProvider class loader (for Java 8). If not we may have just a JRE. + if (JavaVersion.current().isJava8() && ToolProvider.getSystemToolClassLoader() == null) { throw new GradleException( "The ClassLoader obtained from the Java ToolProvider is null. " + "A Java $requiredJavaVersion JDK must be installed. $buildPrerequisitesMessage") } + if (!JavaVersion.current().isJava8() && !JavaVersion.current().isJava11()) { + println("Warning: using Java ${JavaVersion.current()} but only Java 8 and Java 11 have been tested.") + } if (!looksLikeWereInAGitRepository()) { throw new GradleException("This doesn't appear to be a git folder. " + "The GATK Github repository must be cloned using \"git clone\" to run the build. " + @@ -150,7 +158,7 @@ def ensureBuildPrerequisites(requiredJavaVersion, largeResourcesFolder, buildPre resolveLargeResourceStubFiles(largeResourcesFolder, buildPrerequisitesMessage) } -ensureBuildPrerequisites(requiredJavaVersion, largeResourcesFolder, buildPrerequisitesMessage) +ensureBuildPrerequisites(largeResourcesFolder, buildPrerequisitesMessage) configurations.all { resolutionStrategy { @@ -221,7 +229,7 @@ configurations { // Get the jdk files we need to run javaDoc. We need to use these during compile, testCompile, // test execution, and gatkDoc generation, but we don't want them as part of the runtime // classpath and we don't want to redistribute them in the uber jar. -final javadocJDKFiles = files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()) +final javadocJDKFiles = ToolProvider.getSystemToolClassLoader() == null ? files([]) : files(((URLClassLoader) ToolProvider.getSystemToolClassLoader()).getURLs()) dependencies { // javadoc utilities; compile/test only to prevent redistribution of sdk jars @@ -280,15 +288,16 @@ dependencies { compile ('org.ojalgo:ojalgo-commons-math3:1.0.0') { exclude group: 'org.apache.commons' } - compile ('org.apache.spark:spark-mllib_2.11:' + sparkVersion) { + compile ('org.apache.spark:spark-mllib_' + scalaVersion + ':' + sparkVersion) { // JUL is used by Google Dataflow as the backend logger, so exclude jul-to-slf4j to avoid a loop exclude module: 'jul-to-slf4j' exclude module: 'javax.servlet' exclude module: 'servlet-api' } + compile 'com.thoughtworks.paranamer:paranamer:2.8' compile 'org.bdgenomics.bdg-formats:bdg-formats:0.5.0' - compile('org.bdgenomics.adam:adam-core-spark2_2.11:0.20.0') { + compile('org.bdgenomics.adam:adam-core-spark2_' + scalaVersion + ':0.28.0') { exclude group: 'org.slf4j' exclude group: 'org.apache.hadoop' exclude group: 'org.scala-lang' @@ -349,7 +358,7 @@ dependencies { testCompile sourceSets.testUtils.output - testCompile "org.mockito:mockito-core:2.10.0" + testCompile "org.mockito:mockito-core:2.28.2" testCompile "com.google.jimfs:jimfs:1.1" } diff --git a/src/main/java/org/broadinstitute/hellbender/engine/FeatureDataSource.java b/src/main/java/org/broadinstitute/hellbender/engine/FeatureDataSource.java index b5eb1e65fd9..af48d1d522f 100644 --- a/src/main/java/org/broadinstitute/hellbender/engine/FeatureDataSource.java +++ b/src/main/java/org/broadinstitute/hellbender/engine/FeatureDataSource.java @@ -271,8 +271,8 @@ public FeatureDataSource(final FeatureInput featureInput, final int queryLook Utils.nonNull(genomicsDBOptions, "GenomicsDBOptions must not be null. Calling tool may not read from a GenomicsDB data source."); } - final Function cloudWrapper = (cloudPrefetchBuffer > 0 ? is -> SeekableByteChannelPrefetcher.addPrefetcher(cloudPrefetchBuffer, is) : Function.identity()); - final Function cloudIndexWrapper = (cloudIndexPrefetchBuffer > 0 ? is -> SeekableByteChannelPrefetcher.addPrefetcher(cloudIndexPrefetchBuffer, is) : Function.identity()); + final Function cloudWrapper = (cloudPrefetchBuffer > 0 ? is -> SeekableByteChannelPrefetcher.addPrefetcher(cloudPrefetchBuffer, is) : Utils.identityFunction()); + final Function cloudIndexWrapper = (cloudIndexPrefetchBuffer > 0 ? is -> SeekableByteChannelPrefetcher.addPrefetcher(cloudIndexPrefetchBuffer, is) : Utils.identityFunction()); // Create a feature reader without requiring an index. We will require one ourselves as soon as // a query by interval is attempted. @@ -371,7 +371,7 @@ private static FeatureReader getFeatureReader(final Featu if (BucketUtils.isCloudStorageUrl(featureInput)) { return AbstractFeatureReader.getFeatureReader(absoluteRawPath, null, codec, requireIndex, cloudWrapper, cloudIndexWrapper); } else { - return AbstractFeatureReader.getFeatureReader(absoluteRawPath, null, codec, requireIndex, Function.identity(), Function.identity()); + return AbstractFeatureReader.getFeatureReader(absoluteRawPath, null, codec, requireIndex, Utils.identityFunction(), Utils.identityFunction()); } } catch (final TribbleException e) { throw new GATKException("Error initializing feature reader for path " + featureInput.getFeaturePath(), e); diff --git a/src/main/java/org/broadinstitute/hellbender/engine/spark/AssemblyRegionWalkerSpark.java b/src/main/java/org/broadinstitute/hellbender/engine/spark/AssemblyRegionWalkerSpark.java index 6be05bb8c34..a0483a6616b 100644 --- a/src/main/java/org/broadinstitute/hellbender/engine/spark/AssemblyRegionWalkerSpark.java +++ b/src/main/java/org/broadinstitute/hellbender/engine/spark/AssemblyRegionWalkerSpark.java @@ -23,6 +23,7 @@ import org.broadinstitute.hellbender.utils.io.IOUtils; import org.broadinstitute.hellbender.utils.read.GATKRead; +import java.io.Serializable; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -90,7 +91,7 @@ protected Broadcast> assemblyRegionEvaluatorSu } private static Broadcast> assemblyRegionEvaluatorSupplierBroadcastFunction(final JavaSparkContext ctx, final AssemblyRegionEvaluator assemblyRegionEvaluator) { - Supplier supplier = () -> assemblyRegionEvaluator; + Supplier supplier = (Supplier & Serializable) (() -> assemblyRegionEvaluator); return ctx.broadcast(supplier); } diff --git a/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReadsSparkSink.java b/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReadsSparkSink.java index d971b3a039f..ddeb9eb913a 100644 --- a/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReadsSparkSink.java +++ b/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReadsSparkSink.java @@ -13,7 +13,7 @@ import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.broadcast.Broadcast; -import org.bdgenomics.adam.models.RecordGroupDictionary; +import org.bdgenomics.adam.models.ReadGroupDictionary; import org.bdgenomics.adam.models.SequenceDictionary; import org.bdgenomics.formats.avro.AlignmentRecord; import org.broadinstitute.hellbender.exceptions.GATKException; @@ -157,7 +157,7 @@ private static void writeReadsADAM( final JavaSparkContext ctx, final String outputFile, final JavaRDD reads, final SAMFileHeader header) throws IOException { final SequenceDictionary seqDict = SequenceDictionary.fromSAMSequenceDictionary(header.getSequenceDictionary()); - final RecordGroupDictionary readGroups = RecordGroupDictionary.fromSAMHeader(header); + final ReadGroupDictionary readGroups = ReadGroupDictionary.fromSAMHeader(header); final JavaPairRDD rddAlignmentRecords = reads.map(read -> { read.setHeaderStrict(header); diff --git a/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReferenceTwoBitSparkSource.java b/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReferenceTwoBitSparkSource.java index d5d4949723e..30f51785747 100644 --- a/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReferenceTwoBitSparkSource.java +++ b/src/main/java/org/broadinstitute/hellbender/engine/spark/datasources/ReferenceTwoBitSparkSource.java @@ -6,15 +6,19 @@ import org.bdgenomics.adam.models.ReferenceRegion; import org.bdgenomics.adam.util.TwoBitFile; import org.bdgenomics.adam.util.TwoBitRecord; +import org.bdgenomics.formats.avro.Strand; import org.bdgenomics.utils.io.ByteAccess; import org.broadinstitute.hellbender.utils.SimpleInterval; import org.broadinstitute.hellbender.utils.Utils; import org.broadinstitute.hellbender.utils.gcs.BucketUtils; import org.broadinstitute.hellbender.utils.reference.ReferenceBases; +import scala.Tuple2; import scala.collection.JavaConversions; +import scala.collection.immutable.IndexedSeq; import java.io.IOException; import java.io.Serializable; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -39,7 +43,10 @@ public ReferenceTwoBitSparkSource( String referenceURL) throws IOException { byte[] bytes = ByteStreams.toByteArray(BucketUtils.openFile(this.referenceURL)); ByteAccess byteAccess = new DirectFullByteArrayByteAccess(bytes); this.twoBitFile = new TwoBitFile(byteAccess); - this.twoBitSeqEntries = JavaConversions.mapAsJavaMap(twoBitFile.seqRecords()); + this.twoBitSeqEntries = new LinkedHashMap<>(); + for (Tuple2 pair: JavaConversions.seqAsJavaList(twoBitFile.seqRecords())) { + twoBitSeqEntries.put(pair._1, pair._2); + } } /** @@ -74,7 +81,7 @@ private static ReferenceRegion simpleIntervalToReferenceRegion(SimpleInterval in String contig = interval.getContig(); long start = interval.getGA4GHStart(); long end = interval.getGA4GHEnd(); - return new ReferenceRegion(contig, start, end, null); + return new ReferenceRegion(contig, start, end, Strand.UNKNOWN); } private SimpleInterval cropIntervalAtContigEnd( final SimpleInterval interval ) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/Utils.java b/src/main/java/org/broadinstitute/hellbender/utils/Utils.java index b51658114b7..0f47e820de0 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/Utils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/Utils.java @@ -19,6 +19,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; +import java.io.Serializable; import java.lang.reflect.Array; import java.math.BigInteger; import java.nio.file.Files; @@ -1102,6 +1103,18 @@ public static Stream stream(final Iterator iterator) { return stream(() -> iterator); } + /** + * Returns a function that always returns its input argument. Unlike {@link Function#identity()} the returned + * function is also serializable. + * + * @param the type of the input and output objects to the function + * @return a function that always returns its input argument + */ + @SuppressWarnings("unchecked") + public static Function identityFunction() { + return (Function & Serializable) t -> t; + } + /** * Like Guava's {@link Iterators#transform(Iterator, com.google.common.base.Function)}, but runs a fixed number * ({@code numThreads}) of transformations in parallel, while maintaining ordering of the output iterator. diff --git a/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java index eeac6d7b881..3589f5dc592 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java @@ -216,6 +216,7 @@ public static File writeTempResourceFromPath(final String resourcePath, final Cl * @param resource Embedded resource. * @param file File path to write. */ + @SuppressWarnings("deprecation") public static void writeResource(Resource resource, File file) { String path = resource.getPath(); InputStream inputStream = resource.getResourceContentsAsStream(); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/BDGAlignmentRecordToGATKReadAdapter.java b/src/main/java/org/broadinstitute/hellbender/utils/read/BDGAlignmentRecordToGATKReadAdapter.java index ee40ccceb7e..1c71944eed1 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/BDGAlignmentRecordToGATKReadAdapter.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/BDGAlignmentRecordToGATKReadAdapter.java @@ -3,8 +3,7 @@ import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMRecord; import org.bdgenomics.adam.converters.AlignmentRecordConverter; -import org.bdgenomics.adam.models.RecordGroupDictionary; -import org.bdgenomics.adam.models.SAMFileHeaderWritable; +import org.bdgenomics.adam.models.ReadGroupDictionary; import org.bdgenomics.formats.avro.AlignmentRecord; /** @@ -30,8 +29,8 @@ public final class BDGAlignmentRecordToGATKReadAdapter extends SAMRecordToGATKRe private final AlignmentRecord alignmentRecord; public BDGAlignmentRecordToGATKReadAdapter(final AlignmentRecord alignmentRecord, final SAMFileHeader header) { - super(new AlignmentRecordConverter().convert(alignmentRecord, SAMFileHeaderWritable.apply(header), - RecordGroupDictionary.fromSAMHeader(header))); + super(new AlignmentRecordConverter().convert(alignmentRecord, header, + ReadGroupDictionary.fromSAMHeader(header))); this.alignmentRecord = alignmentRecord; } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/GATKReadToBDGAlignmentRecordConverter.java b/src/main/java/org/broadinstitute/hellbender/utils/read/GATKReadToBDGAlignmentRecordConverter.java index eced1c5d26a..d56e72b7fa5 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/GATKReadToBDGAlignmentRecordConverter.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/GATKReadToBDGAlignmentRecordConverter.java @@ -5,7 +5,7 @@ import org.bdgenomics.formats.avro.AlignmentRecord; import org.bdgenomics.adam.converters.SAMRecordConverter; import org.bdgenomics.adam.models.SequenceDictionary; -import org.bdgenomics.adam.models.RecordGroupDictionary; +import org.bdgenomics.adam.models.ReadGroupDictionary; /** * Converts a GATKRead to a BDG AlignmentRecord @@ -15,27 +15,27 @@ public class GATKReadToBDGAlignmentRecordConverter { private SAMFileHeader header; private SequenceDictionary dict; - private RecordGroupDictionary readGroups; + private ReadGroupDictionary readGroups; public GATKReadToBDGAlignmentRecordConverter(SAMFileHeader header) { this.header = header; this.dict = SequenceDictionary.fromSAMSequenceDictionary(header.getSequenceDictionary()); - this.readGroups = RecordGroupDictionary.fromSAMHeader(header); + this.readGroups = ReadGroupDictionary.fromSAMHeader(header); } public static AlignmentRecord convert( final GATKRead gatkRead, final SAMFileHeader header ) { SequenceDictionary dict = SequenceDictionary.fromSAMSequenceDictionary(header.getSequenceDictionary()); - RecordGroupDictionary readGroups = RecordGroupDictionary.fromSAMHeader(header); + ReadGroupDictionary readGroups = ReadGroupDictionary.fromSAMHeader(header); return GATKReadToBDGAlignmentRecordConverter.convert(gatkRead, header, dict, readGroups); } public static AlignmentRecord convert( - final GATKRead gatkRead, final SAMFileHeader header, final SequenceDictionary dict, final RecordGroupDictionary readGroups ) { + final GATKRead gatkRead, final SAMFileHeader header, final SequenceDictionary dict, final ReadGroupDictionary readGroups ) { return converter.convert(gatkRead.convertToSAMRecord(header)); } public static AlignmentRecord convert( - final SAMRecord sam, final SequenceDictionary dict, final RecordGroupDictionary readGroups ) { + final SAMRecord sam, final SequenceDictionary dict, final ReadGroupDictionary readGroups ) { return converter.convert(sam); } } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/runtime/CapturedStreamOutput.java b/src/main/java/org/broadinstitute/hellbender/utils/runtime/CapturedStreamOutput.java index 883f06e2c33..ecf39645a08 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/runtime/CapturedStreamOutput.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/runtime/CapturedStreamOutput.java @@ -92,6 +92,7 @@ public boolean isBufferTruncated() { * * @throws IOException When unable to read or write. */ + @SuppressWarnings("deprecation") public void read() throws IOException { int readCount = 0; try { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java b/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java index 38f6beb257a..2e56521531d 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java @@ -71,6 +71,7 @@ public static int exec(String[] command) { * @param settings Settings to be run. * @return The output of the command. */ + @SuppressWarnings("deprecation") public ProcessOutput exec(ProcessSettings settings) { StreamOutput stdout; StreamOutput stderr; @@ -161,6 +162,7 @@ public ProcessOutput exec(ProcessSettings settings) { * TODO: Try to use NIO to interrupt streams. */ @Override + @SuppressWarnings("deprecation") protected void tryCleanShutdown() { destroyed = true; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/runtime/StreamingProcessController.java b/src/main/java/org/broadinstitute/hellbender/utils/runtime/StreamingProcessController.java index 59e89775135..f83dbbe9dd5 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/runtime/StreamingProcessController.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/runtime/StreamingProcessController.java @@ -349,6 +349,7 @@ private void startListeners() { * NOTE: capture threads may block on read. */ @Override + @SuppressWarnings("deprecation") protected void tryCleanShutdown() { if (stdErrFuture != null && !stdErrFuture.isDone()) { boolean isCancelled = stdErrFuture.cancel(true); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/spark/PileupSparkIntegrationTest.java b/src/test/java/org/broadinstitute/hellbender/tools/spark/PileupSparkIntegrationTest.java index 48593923202..0ee14795506 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/spark/PileupSparkIntegrationTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/spark/PileupSparkIntegrationTest.java @@ -115,7 +115,11 @@ public void testInsertLengthPileup(boolean useShuffle) throws Exception { @Test(dataProvider = "shuffle") public void testFeaturesPileupHdfs(boolean useShuffle) throws Exception { - + // Skip this test when running on Java 11 since it fails with a Spark error that is not fixed until Spark 3 + // see https://issues.apache.org/jira/browse/SPARK-26963 + if (System.getProperty("java.specification.version").equals("11")) { + return; + } MiniClusterUtils.runOnIsolatedMiniCluster( cluster -> { final Path workingDirectory = MiniClusterUtils.getWorkingDir(cluster); final Path vcfPath = new Path(workingDirectory, "dbsnp_138.b37.20.21.vcf"); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/read/GATKReadAdaptersUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/read/GATKReadAdaptersUnitTest.java index ce1e29537e1..7463bdba781 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/read/GATKReadAdaptersUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/read/GATKReadAdaptersUnitTest.java @@ -55,8 +55,6 @@ public Object[][] readPairsForToString() { private static GATKRead basicReadBackedByADAMRecord(final SAMRecord sam) { final AlignmentRecord record = new AlignmentRecord(); - record.setContigName(sam.getContig()); - record.setRecordGroupSample(sam.getReadGroup().getSample()); record.setReadName(sam.getReadName()); record.setSequence(new String(sam.getReadBases())); record.setStart((long)sam.getAlignmentStart()-1); //ADAM records are 0-based