
Commit 7f58d07

Merge branch 'apache:trunk' into trunk
2 parents: cfe1f7c + 2fbbfe3

File tree: 27 files changed, +371 -191 lines


hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java (+9, -1)

@@ -3581,7 +3581,15 @@ public static Class<? extends FileSystem> getFileSystemClass(String scheme,
       throw new UnsupportedFileSystemException("No FileSystem for scheme "
          + "\"" + scheme + "\"");
     }
-    LOGGER.debug("FS for {} is {}", scheme, clazz);
+    if (LOGGER.isDebugEnabled()) {
+      LOGGER.debug("FS for {} is {}", scheme, clazz);
+      final String jarLocation = ClassUtil.findContainingJar(clazz);
+      if (jarLocation != null) {
+        LOGGER.debug("Jar location for {} : {}", clazz, jarLocation);
+      } else {
+        LOGGER.debug("Class location for {} : {}", clazz, ClassUtil.findClassLocation(clazz));
+      }
+    }
     return clazz;
   }
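The jar/class lookup is guarded by LOGGER.isDebugEnabled(), so the extra ClassUtil reflection only runs when debug logging is on. A minimal sketch of exercising this path, assuming debug logging for org.apache.hadoop.fs.FileSystem is enabled in the cluster's log4j configuration (the scheme and class name below are illustrative assumptions, not part of this commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ResolveScheme {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Resolving a scheme now also logs where the implementation class was
    // loaded from: its containing jar if there is one, otherwise the
    // location of its .class file.
    Class<? extends FileSystem> clazz =
        FileSystem.getFileSystemClass("file", conf);
    System.out.println(clazz.getName());
  }
}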

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ClassUtil.java (+17, -5)

@@ -36,13 +36,25 @@ public class ClassUtil {
    * @return a jar file that contains the class, or null.
    */
   public static String findContainingJar(Class<?> clazz) {
-    ClassLoader loader = clazz.getClassLoader();
-    String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
+    return findContainingResource(clazz.getClassLoader(), clazz.getName(), "jar");
+  }
+
+  /**
+   * Find the absolute location of the class.
+   *
+   * @param clazz the class to find.
+   * @return the class file with absolute location, or null.
+   */
+  public static String findClassLocation(Class<?> clazz) {
+    return findContainingResource(clazz.getClassLoader(), clazz.getName(), "file");
+  }
+
+  private static String findContainingResource(ClassLoader loader, String clazz, String resource) {
+    String classFile = clazz.replaceAll("\\.", "/") + ".class";
     try {
-      for(final Enumeration<URL> itr = loader.getResources(classFile);
-          itr.hasMoreElements();) {
+      for (final Enumeration<URL> itr = loader.getResources(classFile); itr.hasMoreElements();) {
         final URL url = itr.nextElement();
-        if ("jar".equals(url.getProtocol())) {
+        if (resource.equals(url.getProtocol())) {
           String toReturn = url.getPath();
           if (toReturn.startsWith("file:")) {
             toReturn = toReturn.substring("file:".length());
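Both public methods now delegate to the shared findContainingResource, differing only in the URL protocol they match: "jar" for classes loaded from a jar, "file" for classes loaded from a directory on the classpath. A small usage sketch, assuming hadoop-common is on the classpath (the class name is an arbitrary example):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.util.ClassUtil;

public class WhereIsIt {
  public static void main(String[] args) {
    // A class loaded from a jar yields a non-null jar path and a null class
    // location; a class loaded from a build/exploded directory is the reverse.
    String jar = ClassUtil.findContainingJar(FileSystem.class);
    String cls = ClassUtil.findClassLocation(FileSystem.class);
    System.out.println(jar != null ? "jar: " + jar : "class file: " + cls);
  }
}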

hadoop-common-project/hadoop-common/src/site/markdown/NativeLibraries.md.vm (+1, -1)

@@ -104,7 +104,7 @@ The bin/hadoop script ensures that the native hadoop library is on the library p
 During runtime, check the hadoop log files for your MapReduce tasks.

 * If everything is all right, then: `DEBUG util.NativeCodeLoader - Trying to load the custom-built native-hadoop library...` `INFO util.NativeCodeLoader - Loaded the native-hadoop library`
-* If something goes wrong, then: `INFO util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable`
+* If something goes wrong, then: `WARN util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable`

 Check
 -----

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java (+4, -2)

@@ -102,7 +102,9 @@ public void setUp() throws Exception {

   @After
   public void tearDown() throws Exception {
-    fSys.delete(new Path(getAbsoluteTestRootPath(fSys), new Path("test")), true);
+    if (fSys != null) {
+      fSys.delete(new Path(getAbsoluteTestRootPath(fSys), new Path("test")), true);
+    }
   }

@@ -192,7 +194,7 @@ public void testWorkingDirectory() throws Exception {

   @Test
   public void testWDAbsolute() throws IOException {
-    Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
+    Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
     fSys.mkdirs(absoluteDir);
     fSys.setWorkingDirectory(absoluteDir);
     Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java (+7, -1)

@@ -81,6 +81,12 @@ public abstract class FileContextMainOperationsBaseTest {
   protected final FileContextTestHelper fileContextTestHelper =
       createFileContextHelper();

+  /**
+   * Create the test helper.
+   * Important: this is invoked during the construction of the base class,
+   * so is very brittle.
+   * @return a test helper.
+   */
   protected FileContextTestHelper createFileContextHelper() {
     return new FileContextTestHelper();
   }

@@ -107,7 +113,7 @@ public boolean accept(Path file) {

   private static final byte[] data = getFileData(numBlocks,
       getDefaultBlockSize());
-
+
   @Before
   public void setUp() throws Exception {
     File testBuildData = GenericTestUtils.getRandomizedTestDir();

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java (+1, -25)

@@ -21,23 +21,13 @@
 import java.io.IOException;

 import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;

 public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest {

   @Override
   protected FileSystem createFileSystem() throws IOException {
     return FileSystem.getLocal(new Configuration());
   }
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-  }

   static Path wd = null;
   @Override
@@ -46,19 +36,5 @@ protected Path getDefaultWorkingDirectory() throws IOException {
     wd = FileSystem.getLocal(new Configuration()).getWorkingDirectory();
     return wd;
   }
-
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-
-  @Test
-  @Override
-  public void testWDAbsolute() throws IOException {
-    Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
-    fSys.mkdirs(absoluteDir);
-    fSys.setWorkingDirectory(absoluteDir);
-    Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
-  }
+
 }

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java (-9)

@@ -53,14 +53,5 @@ public void tearDown() throws Exception {
     super.tearDown();
     ViewFileSystemTestSetup.tearDown(this, fcTarget);
   }
-
-  @Test
-  @Override
-  public void testWDAbsolute() throws IOException {
-    Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
-    fSys.mkdirs(absoluteDir);
-    fSys.setWorkingDirectory(absoluteDir);
-    Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());

-  }
 }

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClassUtil.java (+35, -9)

@@ -20,21 +20,47 @@

 import java.io.File;

-import org.junit.Assert;
+import org.apache.hadoop.fs.viewfs.ViewFileSystem;

-import org.apache.log4j.Logger;
+import org.assertj.core.api.Assertions;
 import org.junit.Test;

 public class TestClassUtil {
+
   @Test(timeout=10000)
   public void testFindContainingJar() {
-    String containingJar = ClassUtil.findContainingJar(Logger.class);
-    Assert.assertNotNull("Containing jar not found for Logger",
-        containingJar);
+    String containingJar = ClassUtil.findContainingJar(Assertions.class);
+    Assertions
+        .assertThat(containingJar)
+        .describedAs("Containing jar for %s", Assertions.class)
+        .isNotNull();
     File jarFile = new File(containingJar);
-    Assert.assertTrue("Containing jar does not exist on file system ",
-        jarFile.exists());
-    Assert.assertTrue("Incorrect jar file " + containingJar,
-        jarFile.getName().matches("reload4j.*[.]jar"));
+    Assertions
+        .assertThat(jarFile)
+        .describedAs("Containing jar %s", jarFile)
+        .exists();
+    Assertions
+        .assertThat(jarFile.getName())
+        .describedAs("Containing jar name %s", jarFile.getName())
+        .matches("assertj-core.*[.]jar");
+  }
+
+  @Test(timeout = 10000)
+  public void testFindContainingClass() {
+    String classFileLocation = ClassUtil.findClassLocation(ViewFileSystem.class);
+    Assertions
+        .assertThat(classFileLocation)
+        .describedAs("Class path for %s", ViewFileSystem.class)
+        .isNotNull();
+    File classFile = new File(classFileLocation);
+    Assertions
+        .assertThat(classFile)
+        .describedAs("Containing class file %s", classFile)
+        .exists();
+    Assertions
+        .assertThat(classFile.getName())
+        .describedAs("Containing class file name %s", classFile.getName())
+        .matches("ViewFileSystem.class");
   }
+
 }

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java (+38)

@@ -33,6 +33,11 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeoutException;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -120,4 +125,37 @@ private CombinedHostsFileReader() {
     }
     return allDNs;
   }
+
+  /**
+   * Wrapper that calls readFile with a timeout, via a FutureTask.
+   * @param hostsFile the input JSON file to read from
+   * @param readTimeout timeout for the FutureTask execution in milliseconds
+   * @return the set of DatanodeAdminProperties
+   * @throws IOException if the read times out, is interrupted, or fails
+   */
+  public static DatanodeAdminProperties[]
+      readFileWithTimeout(final String hostsFile, final int readTimeout) throws IOException {
+    FutureTask<DatanodeAdminProperties[]> futureTask = new FutureTask<>(
+        new Callable<DatanodeAdminProperties[]>() {
+          @Override
+          public DatanodeAdminProperties[] call() throws Exception {
+            return readFile(hostsFile);
+          }
+        });
+
+    Thread thread = new Thread(futureTask);
+    thread.start();
+
+    try {
+      return futureTask.get(readTimeout, TimeUnit.MILLISECONDS);
+    } catch (TimeoutException e) {
+      futureTask.cancel(true);
+      LOG.error("Host file read operation timed out");
+      throw new IOException("host file read operation timed out");
+    } catch (InterruptedException | ExecutionException e) {
+      LOG.error("Host file read operation interrupted: " + e.getMessage());
+      throw new IOException("host file read operation interrupted");
+    }
+  }
 }
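With the wrapper in place, a caller that opts into a timeout gets an IOException instead of an indefinitely blocked refresh, and the reading thread is cancelled on timeout. A caller-side sketch (the file path and the 5000 ms value are illustrative assumptions):

import java.io.IOException;

import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
import org.apache.hadoop.hdfs.util.CombinedHostsFileReader;

public class ReadHosts {
  public static void main(String[] args) {
    try {
      // Fail the read if parsing the JSON hosts file takes longer than 5s.
      DatanodeAdminProperties[] dns = CombinedHostsFileReader
          .readFileWithTimeout("/etc/hadoop/dfs.hosts.json", 5000);
      System.out.println("Read " + dns.length + " datanode entries");
    } catch (IOException e) {
      // Thrown on timeout, interruption, or an underlying read failure.
      System.err.println("Hosts file read failed: " + e.getMessage());
    }
  }
}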

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (+2)

@@ -757,6 +757,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
       "dfs.namenode.hosts.provider.classname";
   public static final String DFS_HOSTS = "dfs.hosts";
   public static final String DFS_HOSTS_EXCLUDE = "dfs.hosts.exclude";
+  public static final String DFS_HOSTS_TIMEOUT = "dfs.hosts.timeout";
+  public static final int DFS_HOSTS_TIMEOUT_DEFAULT = 0;
   public static final String DFS_NAMENODE_AUDIT_LOGGERS_KEY = "dfs.namenode.audit.loggers";
   public static final String DFS_NAMENODE_DEFAULT_AUDIT_LOGGER_NAME = "default";
   public static final String DFS_NAMENODE_AUDIT_LOG_TOKEN_TRACKING_ID_KEY = "dfs.namenode.audit.log.token.tracking.id";

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/CombinedHostFileManager.java (+7, -4)

@@ -179,12 +179,15 @@ public Configuration getConf() {

   @Override
   public void refresh() throws IOException {
-    refresh(conf.get(DFSConfigKeys.DFS_HOSTS, ""));
+    refresh(conf.get(DFSConfigKeys.DFS_HOSTS, ""),
+        conf.getInt(DFSConfigKeys.DFS_HOSTS_TIMEOUT, DFSConfigKeys.DFS_HOSTS_TIMEOUT_DEFAULT)
+    );
   }
-  private void refresh(final String hostsFile) throws IOException {
+  private void refresh(final String hostsFile, final int readTimeout) throws IOException {
     HostProperties hostProps = new HostProperties();
-    DatanodeAdminProperties[] all =
-        CombinedHostsFileReader.readFile(hostsFile);
+    DatanodeAdminProperties[] all = readTimeout != DFSConfigKeys.DFS_HOSTS_TIMEOUT_DEFAULT
+        ? CombinedHostsFileReader.readFileWithTimeout(hostsFile, readTimeout)
+        : CombinedHostsFileReader.readFile(hostsFile);
     for(DatanodeAdminProperties properties : all) {
       InetSocketAddress addr = parseEntry(hostsFile,
           properties.getHostName(), properties.getPort());

hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml (+8, -1)

@@ -1131,7 +1131,14 @@
   not permitted to connect to the namenode. The full pathname of the
   file must be specified. If the value is empty, no hosts are
   excluded.</description>
-</property>
+</property>
+
+<property>
+  <name>dfs.hosts.timeout</name>
+  <value>0</value>
+  <description>Specifies a timeout (in milliseconds) for reading the dfs.hosts file.
+  A value of zero means no timeout is set.</description>
+</property>

<property>
  <name>dfs.namenode.max.objects</name>
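Because the default is 0, existing deployments keep the untimed read path; a cluster opts in by overriding the value in hdfs-site.xml. An illustrative override (the 5000 ms value is an arbitrary example, not a recommendation from this commit):

<!-- hdfs-site.xml: abort a dfs.hosts refresh that takes longer than five
     seconds instead of blocking the namenode indefinitely. -->
<property>
  <name>dfs.hosts.timeout</name>
  <value>5000</value>
</property>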

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java (+57)

@@ -19,14 +19,21 @@

 import java.io.File;
 import java.io.FileWriter;
+import java.io.IOException;
+import java.util.concurrent.Callable;

 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Before;
 import org.junit.After;
 import org.junit.Test;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.mockito.Mock;
+import org.mockito.Mockito;

 import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.when;

 /**
  * Test for JSON based HostsFileReader.
@@ -44,8 +51,12 @@ public class TestCombinedHostsFileReader {
   private final File legacyFile =
       new File(TESTCACHEDATADIR, "legacy.dfs.hosts.json");

+  @Mock
+  private Callable<DatanodeAdminProperties[]> callable;
+
   @Before
   public void setUp() throws Exception {
+    callable = Mockito.mock(Callable.class);
   }

   @After
@@ -87,4 +98,50 @@ public void testEmptyCombinedHostsFileReader() throws Exception {
     CombinedHostsFileReader.readFile(newFile.getAbsolutePath());
     assertEquals(0, all.length);
   }
+
+  /*
+   * When the timeout is enabled, reading the file within the timeout limit
+   * succeeds.
+   */
+  @Test
+  public void testReadFileWithTimeoutSuccess() throws Exception {
+
+    DatanodeAdminProperties[] all = CombinedHostsFileReader.readFileWithTimeout(
+        jsonFile.getAbsolutePath(), 1000);
+    assertEquals(7, all.length);
+  }
+
+  /*
+   * When the timeout is enabled, a read that exceeds the timeout limit
+   * throws an IOException.
+   */
+  @Test(expected = IOException.class)
+  public void testReadFileWithTimeoutTimeoutException() throws Exception {
+    when(callable.call()).thenAnswer(new Answer<Void>() {
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        Thread.sleep(2000);
+        return null;
+      }
+    });
+
+    CombinedHostsFileReader.readFileWithTimeout(
+        jsonFile.getAbsolutePath(), 1);
+  }
+
+  /*
+   * When the timeout is enabled, an interrupted read throws an IOException.
+   */
+  @Test(expected = IOException.class)
+  public void testReadFileWithTimeoutInterruptedException() throws Exception {
+    when(callable.call()).thenAnswer(new Answer<Void>() {
+      @Override
+      public Void answer(InvocationOnMock invocation) throws Throwable {
+        throw new InterruptedException();
+      }
+    });
+
+    CombinedHostsFileReader.readFileWithTimeout(
+        jsonFile.getAbsolutePath(), 1);
+  }
 }
