Merged
TestZStandardCompressorDecompressor.java
@@ -16,10 +16,10 @@
 package org.apache.hadoop.io.compress.zstd;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.compress.CompressionInputStream;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.Compressor;
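Both helpers swallow the exception thrown by close(), so the import swap above keeps the cleanup semantics while dropping the commons-io dependency from the file. A minimal sketch of the replacement call, assuming Hadoop's org.apache.hadoop.io.IOUtils is on the classpath (the byte array is a placeholder for real stream contents):

```java
import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.apache.hadoop.io.IOUtils;

public class CloseStreamExample {
  public static void main(String[] args) {
    InputStream in = new ByteArrayInputStream(new byte[] {1, 2, 3});
    try {
      // ... read from the stream ...
    } finally {
      // Closes the stream, ignoring any exception from close(); intended
      // for cleanup paths, much like the commons-io closeQuietly it replaces.
      IOUtils.closeStream(in);
    }
  }
}
```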
@@ -198,18 +198,16 @@ public void testSetInputWithBytesSizeMoreThenDefaultZStandardBufferSize()
   @Test
   public void testCompressorDecompressorLogicWithCompressionStreams()
       throws Exception {
-    DataOutputStream deflateOut = null;
     DataInputStream inflateIn = null;
     int byteSize = 1024 * 100;
     byte[] bytes = generate(byteSize);
     int bufferSize = IO_FILE_BUFFER_SIZE_DEFAULT;
-    try {
-      DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
-      CompressionOutputStream deflateFilter =
-          new CompressorStream(compressedDataBuffer, new ZStandardCompressor(),
-              bufferSize);
-      deflateOut =
-          new DataOutputStream(new BufferedOutputStream(deflateFilter));
+    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
+    CompressionOutputStream deflateFilter =
+        new CompressorStream(compressedDataBuffer, new ZStandardCompressor(),
+            bufferSize);
+    try (DataOutputStream deflateOut =
+        new DataOutputStream(new BufferedOutputStream(deflateFilter))) {
       deflateOut.write(bytes, 0, bytes.length);
       deflateOut.flush();
       deflateFilter.finish();
@@ -229,8 +227,7 @@ public void testCompressorDecompressorLogicWithCompressionStreams()
       assertArrayEquals("original array not equals compress/decompressed array",
           result, bytes);
     } finally {
-      IOUtils.closeQuietly(deflateOut);
-      IOUtils.closeQuietly(inflateIn);
+      IOUtils.closeStream(inflateIn);
     }
   }
 
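The test conversions in this file all follow the same try-with-resources shape: the compiler closes the resource on every exit path, so the finally/closeQuietly boilerplate disappears. A self-contained sketch of the pattern with plain JDK types (no Hadoop classes needed to illustrate it):

```java
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class TryWithResourcesExample {
  public static byte[] write(byte[] bytes) throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    // out is closed automatically when the block exits, normally or by
    // exception, so no finally block or closeQuietly call is needed.
    try (DataOutputStream out =
        new DataOutputStream(new BufferedOutputStream(sink))) {
      out.write(bytes, 0, bytes.length);
      out.flush();
    }
    return sink.toByteArray();
  }
}
```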
@@ -358,18 +355,15 @@ public void testDecompressingOutput() throws Exception {
         codec.createDecompressor());
 
     byte[] toDecompress = new byte[100];
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
     byte[] decompressedResult;
     int totalFileSize = 0;
-    int result = toDecompress.length;
-    try {
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      int result = toDecompress.length;
       while ((result = inputStream.read(toDecompress, 0, result)) != -1) {
         baos.write(toDecompress, 0, result);
         totalFileSize += result;
       }
       decompressedResult = baos.toByteArray();
-    } finally {
-      IOUtils.closeQuietly(baos);
     }
 
     assertEquals(decompressedResult.length, totalFileSize);
@@ -435,20 +429,16 @@ public void testReadingWithAStream() throws Exception {
     ZStandardCodec codec = new ZStandardCodec();
     codec.setConf(CONFIGURATION);
     Decompressor decompressor = codec.createDecompressor();
-    CompressionInputStream cis =
-        codec.createInputStream(inputStream, decompressor);
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
     byte[] resultOfDecompression;
-    try {
+    try (CompressionInputStream cis =
+        codec.createInputStream(inputStream, decompressor);
+        ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
       byte[] buffer = new byte[100];
       int n;
       while ((n = cis.read(buffer, 0, buffer.length)) != -1) {
         baos.write(buffer, 0, n);
       }
       resultOfDecompression = baos.toByteArray();
-    } finally {
-      IOUtils.closeQuietly(baos);
-      IOUtils.closeQuietly(cis);
     }
 
     byte[] expected = FileUtils.readFileToByteArray(uncompressedFile);
GenericTestUtils.java
@@ -47,13 +47,13 @@
 import java.util.function.Supplier;
 import java.util.regex.Pattern;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.BlockingThreadPoolExecutorService;
 import org.apache.hadoop.util.DurationInfo;
 import org.apache.hadoop.util.StringUtils;
@@ -500,7 +500,7 @@ public String getOutput() {
 
   @Override
   public void close() throws Exception {
-    IOUtils.closeQuietly(bytesPrintStream);
+    IOUtils.closeStream(bytesPrintStream);
     System.setErr(oldErr);
   }
 }
@@ -821,12 +821,10 @@ public static void assumeInNativeProfile() {
    */
   public static String getFilesDiff(File a, File b) throws IOException {
     StringBuilder bld = new StringBuilder();
-    BufferedReader ra = null, rb = null;
-    try {
-      ra = new BufferedReader(
-          new InputStreamReader(new FileInputStream(a)));
-      rb = new BufferedReader(
-          new InputStreamReader(new FileInputStream(b)));
+    try (BufferedReader ra = new BufferedReader(
+        new InputStreamReader(new FileInputStream(a)));
+        BufferedReader rb = new BufferedReader(
+            new InputStreamReader(new FileInputStream(b)))) {
       while (true) {
         String la = ra.readLine();
         String lb = rb.readLine();
@@ -846,9 +844,6 @@ public static String getFilesDiff(File a, File b) throws IOException {
           bld.append(" + ").append(lb).append("\n");
         }
       }
-    } finally {
-      IOUtils.closeQuietly(ra);
-      IOUtils.closeQuietly(rb);
     }
     return bld.toString();
   }
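The getFilesDiff rewrite uses the two-resource form of try-with-resources: resources are closed in reverse declaration order, and each close is attempted even if the other throws (failures surface as suppressed exceptions). A sketch of the same shape, with placeholder file paths:

```java
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Objects;

public class TwoResourceExample {
  public static boolean sameFirstLine(String fileA, String fileB)
      throws IOException {
    try (BufferedReader ra = new BufferedReader(new FileReader(fileA));
        BufferedReader rb = new BufferedReader(new FileReader(fileB))) {
      // rb is closed first, then ra, once this block exits.
      return Objects.equals(ra.readLine(), rb.readLine());
    }
  }
}
```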
TestSysInfoLinux.java
@@ -23,8 +23,6 @@
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
@@ -521,12 +519,9 @@ public void testCoreCounts() throws IOException {
 
   private void writeFakeCPUInfoFile(String content) throws IOException {
     File tempFile = new File(FAKE_CPUFILE);
-    FileWriter fWriter = new FileWriter(FAKE_CPUFILE);
-    tempFile.deleteOnExit();
-    try {
+    try (FileWriter fWriter = new FileWriter(FAKE_CPUFILE)) {
+      tempFile.deleteOnExit();
       fWriter.write(content);
-    } finally {
-      IOUtils.closeQuietly(fWriter);
     }
   }
 
MiniKMS.java
@@ -105,11 +105,9 @@ public MiniKMS(String kmsConfDir, String log4ConfFile, String keyStore,
 
   private void copyResource(String inputResourceName, File outputFile) throws
       IOException {
-    InputStream is = ThreadUtil.getResourceAsStream(inputResourceName);
-    try (OutputStream os = new FileOutputStream(outputFile)) {
+    try (InputStream is = ThreadUtil.getResourceAsStream(inputResourceName);
+        OutputStream os = new FileOutputStream(outputFile)) {
       IOUtils.copy(is, os);
-    } finally {
-      IOUtils.closeQuietly(is);
     }
   }
 
DFSInputStream.java
@@ -44,7 +44,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ByteBufferPositionedReadable;
 import org.apache.hadoop.fs.ByteBufferReadable;
@@ -77,6 +76,7 @@
 import org.apache.hadoop.hdfs.shortcircuit.ClientMmap;
 import org.apache.hadoop.hdfs.util.IOUtilsClient;
 import org.apache.hadoop.io.ByteBufferPool;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.RetriableException;
@@ -1919,7 +1919,7 @@ private synchronized ByteBuffer tryReadZeroCopy(int maxLength,
       success = true;
     } finally {
       if (!success) {
-        IOUtils.closeQuietly(clientMmap);
+        IOUtils.closeStream(clientMmap);
       }
     }
     return buffer;
@@ -1934,7 +1934,7 @@ public synchronized void releaseBuffer(ByteBuffer buffer) {
           "that was not created by this stream, " + buffer);
     }
     if (val instanceof ClientMmap) {
-      IOUtils.closeQuietly((ClientMmap)val);
+      IOUtils.closeStream((ClientMmap)val);
     } else if (val instanceof ByteBufferPool) {
       ((ByteBufferPool)val).putBuffer(buffer);
    }
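Note that closeStream accepts any java.io.Closeable, not just streams, which is why it can stand in for closeQuietly on the ClientMmap above. A sketch of that cleanup-on-failure shape, with MmapLike as a hypothetical stand-in for ClientMmap:

```java
import java.io.Closeable;

import org.apache.hadoop.io.IOUtils;

public class CloseableCleanupExample {
  static class MmapLike implements Closeable {
    @Override
    public void close() {
      // release the underlying mapping
    }
  }

  public static void main(String[] args) {
    MmapLike mmap = new MmapLike();
    boolean success = false;
    try {
      // ... hand the mapping to a caller, then mark success ...
      success = true;
    } finally {
      // Only clean up if ownership was not transferred.
      if (!success) {
        IOUtils.closeStream(mmap);
      }
    }
  }
}
```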
HdfsUtils.java
@@ -20,7 +20,6 @@
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -63,24 +62,18 @@ public static boolean isHealthy(URI uri) {
     conf.setInt(
         CommonConfigurationKeysPublic.IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 0);
 
-    DistributedFileSystem fs = null;
-    try {
-      fs = (DistributedFileSystem)FileSystem.get(uri, conf);
+    try (DistributedFileSystem fs =
+        (DistributedFileSystem) FileSystem.get(uri, conf)) {
       final boolean safemode = fs.setSafeMode(SafeModeAction.SAFEMODE_GET);
       if (LOG.isDebugEnabled()) {
         LOG.debug("Is namenode in safemode? " + safemode + "; uri=" + uri);
       }
-
-      fs.close();
-      fs = null;
       return !safemode;
-    } catch(IOException e) {
+    } catch (IOException e) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Got an exception for uri=" + uri, e);
       }
       return false;
-    } finally {
-      IOUtils.closeQuietly(fs);
     }
   }
 }
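The isHealthy rewrite is worth a closer look: try-with-resources subsumes both the happy-path fs.close() and the finally-path closeQuietly(fs), and an IOException from the body still reaches the catch clause after the resource is closed. A sketch of that control flow, where Probe is a hypothetical stand-in for DistributedFileSystem:

```java
import java.io.Closeable;
import java.io.IOException;

public class HealthCheckShape {
  interface Probe extends Closeable {
    boolean inSafeMode() throws IOException;
  }

  static boolean isHealthy(Probe probe) {
    try (Probe p = probe) {
      return !p.inSafeMode(); // p.close() runs before the return completes
    } catch (IOException e) {
      return false;           // p was already closed when we get here
    }
  }
}
```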
DomainSocketFactory.java
@@ -21,8 +21,8 @@
 import java.net.InetSocketAddress;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
@@ -174,7 +174,7 @@ public DomainSocket createSocket(PathInfo info, int socketTimeout) {
     } finally {
       if (!success) {
         if (sock != null) {
-          IOUtils.closeQuietly(sock);
+          IOUtils.closeStream(sock);
         }
         pathMap.put(info.getPath(), PathState.UNUSABLE);
         sock = null;
BlockSender.java
@@ -434,8 +434,8 @@ class BlockSender implements java.io.Closeable {
     } catch (IOException ioe) {
       IOUtils.cleanupWithLogger(null, volumeRef);
       IOUtils.closeStream(this);
-      org.apache.commons.io.IOUtils.closeQuietly(blockIn);
-      org.apache.commons.io.IOUtils.closeQuietly(checksumIn);
+      IOUtils.closeStream(blockIn);
+      IOUtils.closeStream(checksumIn);
       throw ioe;
     }
   }
DataXceiverServer.java
@@ -27,12 +27,12 @@
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.net.PeerServer;
 import org.apache.hadoop.hdfs.util.DataTransferThrottler;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.Daemon;
 
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
@@ -248,10 +248,10 @@ public void run() {
         LOG.warn("{}:DataXceiverServer", datanode.getDisplayName(), ace);
       }
     } catch (IOException ie) {
-      IOUtils.closeQuietly(peer);
+      IOUtils.closeStream(peer);
       LOG.warn("{}:DataXceiverServer", datanode.getDisplayName(), ie);
     } catch (OutOfMemoryError ie) {
-      IOUtils.closeQuietly(peer);
+      IOUtils.closeStream(peer);
       // DataNode can run out of memory if there is too many transfers.
       // Log the event, Sleep for 30 seconds, other transfers may complete by
       // then.
@@ -334,7 +334,7 @@ void closePeer(Peer peer) {
     peers.remove(peer);
     peersXceiver.remove(peer);
     datanode.metrics.decrDataNodeActiveXceiversCount();
-    IOUtils.closeQuietly(peer);
+    IOUtils.closeStream(peer);
     if (peers.isEmpty()) {
       this.noPeers.signalAll();
     }
@@ -396,7 +396,7 @@ void closeAllPeers() {
     LOG.info("Closing all peers.");
     lock.lock();
     try {
-      peers.keySet().forEach(p -> IOUtils.closeQuietly(p));
+      peers.keySet().forEach(IOUtils::closeStream);
       peers.clear();
       peersXceiver.clear();
       datanode.metrics.setDataNodeActiveXceiversCount(0);
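The lambda-to-method-reference change in closeAllPeers is behavior-preserving; the method reference just avoids the throwaway parameter. A sketch of the equivalence (only one form is needed in practice; both are shown for comparison, and closing twice is harmless since Closeable.close() is idempotent by contract):

```java
import java.io.Closeable;
import java.util.Set;

import org.apache.hadoop.io.IOUtils;

public class ForEachCloseExample {
  static void closeAll(Set<? extends Closeable> resources) {
    resources.forEach(r -> IOUtils.closeStream(r)); // lambda form
    resources.forEach(IOUtils::closeStream);        // method-reference form
  }
}
```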
FileIoProvider.java
@@ -336,7 +336,7 @@ public FileInputStream getFileInputStream(
       profilingEventHook.afterMetadataOp(volume, OPEN, begin);
       return fis;
     } catch(Exception e) {
-      org.apache.commons.io.IOUtils.closeQuietly(fis);
+      IOUtils.closeStream(fis);
       onFailure(volume, begin);
       throw e;
     }
@@ -367,7 +367,7 @@ public FileOutputStream getFileOutputStream(
       profilingEventHook.afterMetadataOp(volume, OPEN, begin);
       return fos;
     } catch(Exception e) {
-      org.apache.commons.io.IOUtils.closeQuietly(fos);
+      IOUtils.closeStream(fos);
       onFailure(volume, begin);
       throw e;
     }
@@ -432,7 +432,7 @@ public FileInputStream getShareDeleteFileInputStream(
       profilingEventHook.afterMetadataOp(volume, OPEN, begin);
       return fis;
     } catch(Exception e) {
-      org.apache.commons.io.IOUtils.closeQuietly(fis);
+      IOUtils.closeStream(fis);
       onFailure(volume, begin);
       throw e;
     }
@@ -464,7 +464,7 @@ public FileInputStream openAndSeek(
       profilingEventHook.afterMetadataOp(volume, OPEN, begin);
       return fis;
     } catch(Exception e) {
-      org.apache.commons.io.IOUtils.closeQuietly(fis);
+      IOUtils.closeStream(fis);
       onFailure(volume, begin);
       throw e;
     }
@@ -495,7 +495,7 @@ public RandomAccessFile getRandomAccessFile(
       profilingEventHook.afterMetadataOp(volume, OPEN, begin);
       return raf;
     } catch(Exception e) {
-      org.apache.commons.io.IOUtils.closeQuietly(raf);
+      IOUtils.closeStream(raf);
       onFailure(volume, begin);
       throw e;
     }
ShortCircuitRegistry.java
@@ -30,8 +30,8 @@
 import java.util.Iterator;
 import java.util.Set;
 
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -322,7 +322,7 @@ public NewShmInfo createNewMemorySegment(String clientName,
       shm = new RegisteredShm(clientName, shmId, fis, this);
     } finally {
       if (shm == null) {
-        IOUtils.closeQuietly(fis);
+        IOUtils.closeStream(fis);
       }
     }
     info = new NewShmInfo(shmId, fis);
@@ -392,7 +392,7 @@ public void shutdown() {
       if (!enabled) return;
       enabled = false;
     }
-    IOUtils.closeQuietly(watcher);
+    IOUtils.closeStream(watcher);
   }
 
   public static interface Visitor {