hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -144,6 +144,8 @@
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
@@ -196,6 +198,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility {
public static final boolean PRESPLIT_TEST_TABLE = true;

private MiniDFSCluster dfsCluster = null;
private FsDatasetAsyncDiskServiceFixer dfsClusterFixer = null;

private volatile HBaseCluster hbaseCluster = null;
private MiniMRCluster mrCluster = null;
@@ -574,6 +577,56 @@ public boolean cleanupDataTestDirOnTestFS(String subdirName) throws IOException
return getTestFileSystem().delete(cpath, true);
}

// Workaround to avoid IllegalThreadStateException
// See HBASE-27148 for more details
private static final class FsDatasetAsyncDiskServiceFixer extends Thread {

private volatile boolean stopped = false;

private final MiniDFSCluster cluster;

FsDatasetAsyncDiskServiceFixer(MiniDFSCluster cluster) {
super("FsDatasetAsyncDiskServiceFixer");
setDaemon(true);
this.cluster = cluster;
}

@Override
public void run() {
while (!stopped) {
try {
Thread.sleep(30000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
continue;
}
// New datanodes can be added during tests, so check every 30 seconds; the
// thread pool executor's keepalive timeout is 60 seconds by default.
try {
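// Both the dataset's asyncDiskService field and the threadGroup field inside
// FsDatasetAsyncDiskService are private to HDFS, so reflection is the only way
// to reach them; marking the group non-daemon keeps it from being destroyed
// automatically once its idle worker threads time out.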
for (DataNode dn : cluster.getDataNodes()) {
FsDatasetSpi<?> dataset = dn.getFSDataset();
Field service = dataset.getClass().getDeclaredField("asyncDiskService");
service.setAccessible(true);
Object asyncDiskService = service.get(dataset);
Field group = asyncDiskService.getClass().getDeclaredField("threadGroup");
group.setAccessible(true);
ThreadGroup threadGroup = (ThreadGroup) group.get(asyncDiskService);
if (threadGroup.isDaemon()) {
threadGroup.setDaemon(false);
}
}
} catch (Exception e) {
LOG.warn("failed to reset thread pool timeout for FsDatasetAsyncDiskService", e);
}
}
}

void shutdown() {
stopped = true;
interrupt();
}
}

/**
* Start a minidfscluster.
* @param servers How many DNs to start.
* @see #shutdownMiniDFSCluster()
@@ -632,7 +685,8 @@ public MiniDFSCluster startMiniDFSCluster(int servers, final String racks[], Str

this.dfsCluster =
new MiniDFSCluster(0, this.conf, servers, true, true, true, null, racks, hosts, null);

this.dfsClusterFixer = new FsDatasetAsyncDiskServiceFixer(dfsCluster);
this.dfsClusterFixer.start();
// Set this just-started cluster as our filesystem.
setFs();

@@ -656,6 +710,8 @@ public MiniDFSCluster startMiniDFSClusterForTestWAL(int namenodePort) throws IOE
"ERROR");
dfsCluster =
new MiniDFSCluster(namenodePort, conf, 5, false, true, true, null, null, null, null);
this.dfsClusterFixer = new FsDatasetAsyncDiskServiceFixer(dfsCluster);
this.dfsClusterFixer.start();
return dfsCluster;
}

@@ -778,6 +834,12 @@ public void shutdownMiniDFSCluster() throws IOException {
// The below throws an exception per dn, AsynchronousCloseException.
this.dfsCluster.shutdown();
dfsCluster = null;
// The dfs cluster may have been set through the setDFSCluster method, in which
// case there is no fixer to stop
if (dfsClusterFixer != null) {
this.dfsClusterFixer.shutdown();
dfsClusterFixer = null;
}
dataTestDirOnTestFS = null;
CommonFSUtils.setFsDefault(this.conf, new Path("file:///"));
}
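
Why the fixer is needed, in brief: FsDatasetAsyncDiskService inside each DataNode runs its workers in a daemon ThreadGroup, and on the JDKs this branch supports (8 and 11) a daemon group is destroyed automatically once its last thread exits, for example after the executor's 60-second keepalive expires. Any later attempt to construct a thread in the destroyed group then throws IllegalThreadStateException. The following is a minimal, self-contained sketch of that failure mode; it is illustrative only, not part of the patch, and the class and variable names are made up:

public class DaemonGroupDemo {
  public static void main(String[] args) throws Exception {
    ThreadGroup group = new ThreadGroup("async-disk-service-demo");
    // A daemon group is destroyed as soon as its last thread stops (JDK 8/11).
    group.setDaemon(true);
    Thread worker = new Thread(group, () -> { /* short-lived task */ });
    worker.start();
    worker.join(); // worker has exited, so the daemon group is now destroyed
    // The Thread constructor rejects a destroyed group, which is exactly the
    // error the fixer prevents by flipping the group back to non-daemon:
    new Thread(group, () -> { }); // throws IllegalThreadStateException
  }
}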
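For test authors nothing changes; the fixer is an internal detail of the start/shutdown pair above. A hedged sketch of the resulting lifecycle, where the variable names and try/finally scaffolding are illustrative rather than taken from the patch:

HBaseTestingUtility util = new HBaseTestingUtility();
MiniDFSCluster cluster = util.startMiniDFSCluster(3); // also starts the fixer thread
try {
  // exercise HDFS-backed test logic against cluster
} finally {
  util.shutdownMiniDFSCluster(); // stops the fixer and clears the reference
}
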
10 changes: 10 additions & 0 deletions hbase-testing-util/pom.xml
@@ -128,6 +128,16 @@
<type>test-jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
121 changes: 39 additions & 82 deletions pom.xml
@@ -544,7 +544,7 @@
<java.min.version>${compileSource}</java.min.version>
<!-- Dependencies -->
<hadoop-two.version>2.10.0</hadoop-two.version>
<hadoop-three.version>3.1.2</hadoop-three.version>
<hadoop-three.version>3.2.3</hadoop-three.version>
<!-- These must be defined here for downstream build tools that don't look at profiles.
They ought to match the values found in our default hadoop profile, which is
currently "hadoop-2.0". See HBASE-15925 for more info. -->
@@ -557,7 +557,7 @@
<netty.hadoop.version>3.6.2.Final</netty.hadoop.version>
<!-- end HBASE-15925 default hadoop compatibility values -->
<audience-annotations.version>0.5.0</audience-annotations.version>
<avro.version>1.7.7</avro.version>
<avro.version>1.11.0</avro.version>
<caffeine.version>2.8.1</caffeine.version>
<commons-codec.version>1.13</commons-codec.version>
<commons-io.version>2.11.0</commons-io.version>
@@ -2816,8 +2816,6 @@
--add-opens java.base/java.lang.reflect=ALL-UNNAMED
--add-exports java.base/jdk.internal.misc=ALL-UNNAMED
${hbase-surefire.argLine}</argLine>
<!-- We need a minimum HDFS version of 3.2.0 for HADOOP-12760 -->
<hadoop-three.version>3.2.0</hadoop-three.version>
<!--
Value to use for surefire when running jdk11.
TODO: replicate logic for windows
@@ -3643,12 +3641,16 @@
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
@@ -3666,14 +3668,6 @@
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -3702,12 +3696,8 @@
<version>${hadoop-three.version}</version>
<exclusions>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
<groupId>org.codehaus.jackson</groupId>
<artifactId>*</artifactId>
</exclusion>
<!--HERE-->
<exclusion>
@@ -3777,19 +3767,7 @@
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
@@ -3860,11 +3838,7 @@
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
@@ -3929,11 +3903,7 @@
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.google.guava</groupId>
@@ -3968,38 +3938,6 @@
-->
</exclusions>
</dependency>
<dependency>
<!-- Is this needed? Seems a duplicate of the above dependency but for the
classifier-->
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop-three.version}</version>
<classifier>tests</classifier>
<type>test-jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>ch.qos.reload4j</groupId>
<artifactId>reload4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-auth</artifactId>
@@ -4081,12 +4019,8 @@
<artifactId>junit</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehause.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehause.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
@@ -4182,6 +4116,29 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
</exclusion>
<!--
Needed in test context when hadoop-3.3 runs.
<exclusion>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</exclusion>
-->
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>javax.servlet.jsp</groupId>
<artifactId>jsp-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
</exclusion>
<exclusion>
<groupId>javax.ws.rs</groupId>
<artifactId>jsr311-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>