Merged
2 changes: 2 additions & 0 deletions dev-support/pmd/pmd-ruleset.xml
@@ -32,5 +32,7 @@
<rule ref="category/java/bestpractices.xml/UnusedPrivateField"/>
<rule ref="category/java/bestpractices.xml/UseCollectionIsEmpty" />

<rule ref="category/java/performance.xml/AvoidFileStream"/>

<exclude-pattern>.*/generated-sources/.*</exclude-pattern>
</ruleset>
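The new AvoidFileStream rule makes PMD flag direct construction of java.io.FileInputStream and java.io.FileOutputStream, steering code toward the java.nio.file.Files factory methods; that is exactly the migration the hunks below perform. A minimal before/after sketch of the pattern (class, method, and file names here are illustrative, not part of this change):

```java
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;

final class AvoidFileStreamExample {
  private AvoidFileStreamExample() { }

  static int readFirstByte(File file) throws IOException {
    // Flagged by AvoidFileStream:  new FileInputStream(file)
    // Preferred replacement, as used throughout this PR:
    try (InputStream in = Files.newInputStream(file.toPath())) {
      return in.read();
    }
  }

  static void write(File file, byte[] data) throws IOException {
    // Flagged by AvoidFileStream:  new FileOutputStream(file)
    try (OutputStream out = Files.newOutputStream(file.toPath())) {
      out.write(data);
    }
  }
}
```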
@@ -20,10 +20,10 @@
import static org.apache.commons.collections.EnumerationUtils.toList;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -109,7 +109,7 @@ public NodeSchemaLoadResult loadSchemaFromFile(String schemaFilePath)
if (schemaFile.exists()) {
LOG.info("Load network topology schema file {}",
schemaFile.getAbsolutePath());
try (FileInputStream inputStream = new FileInputStream(schemaFile)) {
try (InputStream inputStream = Files.newInputStream(schemaFile.toPath())) {
return loadSchemaFromStream(schemaFilePath, inputStream);
}
} else {
@@ -24,8 +24,6 @@

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@@ -244,7 +242,7 @@ public synchronized void writeCertificate(Path basePath, String fileName,
File certificateFile =
Paths.get(basePath.toString(), fileName).toFile();

try (FileOutputStream file = new FileOutputStream(certificateFile)) {
try (OutputStream file = Files.newOutputStream(certificateFile.toPath())) {
file.write(pemEncodedCertificate.getBytes(DEFAULT_CHARSET));
}
LOG.info("Save certificate to {}", certificateFile.getAbsolutePath());
@@ -271,7 +269,7 @@ private CertPath getCertPath(Path path, String fileName) throws IOException,
throw new IOException("Unable to find the requested certificate file. " +
"Path: " + certFile);
}
try (FileInputStream is = new FileInputStream(certFile)) {
try (InputStream is = Files.newInputStream(certFile.toPath())) {
return generateCertPathFromInputStream(is);
}
}
@@ -17,8 +17,16 @@

package org.apache.hadoop.hdds.utils;

import jakarta.annotation.Nonnull;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.Properties;
import org.apache.ratis.util.AtomicFileOutputStream;
import org.slf4j.Logger;

/**
@@ -95,4 +103,20 @@ public static void closeQuietly(AutoCloseable... closeables) {
public static void closeQuietly(Collection<? extends AutoCloseable> closeables) {
close(null, closeables);
}

/** Write {@code properties} to {@code file}, replacing any existing content. */
public static void writePropertiesToFile(File file, Properties properties) throws IOException {
try (OutputStream out = new AtomicFileOutputStream(file)) {
properties.store(out, null);
}
}

/** Read {@link Properties} from {@code file}. */
public static @Nonnull Properties readPropertiesFromFile(File file) throws IOException {
Properties props = new Properties();
try (InputStream in = Files.newInputStream(file.toPath())) {
props.load(in);
}
return props;
}
}
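The two helpers added above give callers a one-line way to persist and reload a Properties file. Writes go through Ratis's AtomicFileOutputStream, so (assuming that class's usual write-to-temp-file-then-rename-on-close behavior, which this diff does not spell out) an interrupted write should leave the previous file contents in place rather than a truncated file. A minimal round-trip sketch; the path and key are made up for illustration:

```java
import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.hdds.utils.IOUtils;

final class PropertiesRoundTrip {
  public static void main(String[] args) throws IOException {
    File versionFile = new File("/tmp/VERSION");      // illustrative path

    Properties props = new Properties();
    props.setProperty("clusterID", "CID-example");    // illustrative key/value

    // The target file is only replaced once the underlying stream closes cleanly.
    IOUtils.writePropertiesToFile(versionFile, props);

    // Read it back and verify the round trip.
    Properties loaded = IOUtils.readPropertiesFromFile(versionFile);
    System.out.println(loaded.getProperty("clusterID"));
  }
}
```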
@@ -31,11 +31,12 @@

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
@@ -83,7 +84,7 @@ public void testGetAllPropertiesByTags(@TempDir File tempDir)
throws Exception {
File coreDefault = new File(tempDir, "core-default-test.xml");
File coreSite = new File(tempDir, "core-site-test.xml");
FileOutputStream coreDefaultStream = new FileOutputStream(coreDefault);
OutputStream coreDefaultStream = Files.newOutputStream(coreDefault.toPath());
try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
coreDefaultStream, StandardCharsets.UTF_8))) {
startConfig(out);
@@ -102,7 +103,7 @@ public void testGetAllPropertiesByTags(@TempDir File tempDir)
.getProperty("dfs.random.key"));
}

FileOutputStream coreSiteStream = new FileOutputStream(coreSite);
OutputStream coreSiteStream = Files.newOutputStream(coreSite.toPath());
try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
coreSiteStream, StandardCharsets.UTF_8))) {
startConfig(out);
@@ -286,7 +287,7 @@ public void testInstantiationWithInputConfiguration(@TempDir File tempDir)
Configuration configuration = new Configuration(true);

File ozoneSite = new File(tempDir, "ozone-site.xml");
FileOutputStream ozoneSiteStream = new FileOutputStream(ozoneSite);
OutputStream ozoneSiteStream = Files.newOutputStream(ozoneSite.toPath());
try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
ozoneSiteStream, StandardCharsets.UTF_8))) {
startConfig(out);
@@ -28,8 +28,9 @@

import com.google.common.base.Preconditions;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -177,7 +178,7 @@ public static synchronized DatanodeDetails readDatanodeDetailsFrom(File path)
LOG.warn("Error loading DatanodeDetails yaml from {}",
path.getAbsolutePath(), e);
// Try to load as protobuf before giving up
try (FileInputStream in = new FileInputStream(path)) {
try (InputStream in = Files.newInputStream(path.toPath())) {
return DatanodeDetails.getFromProtoBuf(
HddsProtos.DatanodeDetailsProto.parseFrom(in));
} catch (IOException io) {
@@ -18,13 +18,13 @@
package org.apache.hadoop.ozone.container.common.helpers;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -72,7 +72,7 @@ public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
Yaml yaml = new Yaml(options);

try (Writer writer = new OutputStreamWriter(
new FileOutputStream(path), StandardCharsets.UTF_8)) {
Files.newOutputStream(path.toPath()), StandardCharsets.UTF_8)) {
yaml.dump(getDatanodeDetailsYaml(datanodeDetails, conf), writer);
}
}
@@ -83,7 +83,7 @@ public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
public static DatanodeDetails readDatanodeIdFile(File path)
throws IOException {
DatanodeDetails datanodeDetails;
try (FileInputStream inputFileStream = new FileInputStream(path)) {
try (InputStream inputFileStream = Files.newInputStream(path.toPath())) {
DatanodeDetailsYaml datanodeDetailsYaml;
try {
datanodeDetailsYaml =
@@ -18,11 +18,9 @@
package org.apache.hadoop.ozone.container.common.helpers;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Properties;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.ozone.OzoneConsts;

/**
@@ -64,31 +62,16 @@ private Properties createProperties() {
*/
public void createVersionFile(File path) throws
IOException {
try (RandomAccessFile file = new RandomAccessFile(path, "rws");
FileOutputStream out = new FileOutputStream(file.getFD())) {
file.getChannel().truncate(0);
Properties properties = createProperties();
/*
* If server is interrupted before this line,
* the version file will remain unchanged.
*/
properties.store(out, null);
}
IOUtils.writePropertiesToFile(path, createProperties());
}


/**
* Creates a property object from the specified file content.
* @param versionFile
* @return Properties
* @throws IOException
*/
public static Properties readFrom(File versionFile) throws IOException {
try (RandomAccessFile file = new RandomAccessFile(versionFile, "rws");
FileInputStream in = new FileInputStream(file.getFD())) {
Properties props = new Properties();
props.load(in);
return props;
}
return IOUtils.readPropertiesFromFile(versionFile);
}
}
@@ -17,30 +17,34 @@

package org.apache.hadoop.ozone.container.common.impl;

import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.nio.file.StandardOpenOption.WRITE;
import static org.apache.hadoop.ozone.OzoneConsts.REPLICA_INDEX;
import static org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData.KEYVALUE_YAML_TAG;

import com.google.common.base.Preconditions;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerType;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.apache.ratis.util.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
@@ -81,7 +85,7 @@ private ContainerDataYaml() {
public static void createContainerFile(ContainerType containerType,
ContainerData containerData, File containerFile) throws IOException {
Writer writer = null;
FileOutputStream out = null;
OutputStream out = null;
try {
boolean withReplicaIndex =
containerData instanceof KeyValueContainerData &&
@@ -93,16 +97,13 @@ public static void createContainerFile(ContainerType containerType,
containerData.computeAndSetChecksum(yaml);

// Write the ContainerData with checksum to Yaml file.
out = new FileOutputStream(
containerFile);
out = FileUtils.newOutputStreamForceAtClose(containerFile, CREATE, TRUNCATE_EXISTING, WRITE);
writer = new OutputStreamWriter(out, StandardCharsets.UTF_8);
yaml.dump(containerData, writer);
} finally {
try {
if (writer != null) {
writer.flush();
// make sure the container metadata is synced to disk.
out.getFD().sync();
writer.close();
}
} catch (IOException ex) {
@@ -121,7 +122,7 @@ public static void createContainerFile(ContainerType containerType,
public static ContainerData readContainerFile(File containerFile)
throws IOException {
Preconditions.checkNotNull(containerFile, "containerFile cannot be null");
try (FileInputStream inputFileStream = new FileInputStream(containerFile)) {
try (InputStream inputFileStream = Files.newInputStream(containerFile.toPath())) {
return readContainer(inputFileStream);
}

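In createContainerFile above (and in the ContainerStateMachine snapshot code later in this diff), the explicit flush() plus getFD().sync() sequence is replaced by Ratis's FileUtils.newOutputStreamForceAtClose, whose returned stream (as the name and the removal of the explicit sync suggest) forces the written bytes to disk when it is closed. A small sketch of that durable-write shape as it is used here; the target file and payload are illustrative:

```java
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.nio.file.StandardOpenOption.WRITE;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.ratis.util.FileUtils;

final class DurableWriteSketch {
  private DurableWriteSketch() { }

  static void writeDurably(File target, String payload) throws IOException {
    // Closing the stream both flushes and forces the file contents to disk,
    // replacing the old pattern of out.flush(); out.getFD().sync();
    try (OutputStream out = FileUtils.newOutputStreamForceAtClose(
        target, CREATE, TRUNCATE_EXISTING, WRITE)) {
      out.write(payload.getBytes(StandardCharsets.UTF_8));
    }
  }
}
```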
@@ -17,14 +17,18 @@

package org.apache.hadoop.ozone.container.common.transport.server.ratis;

import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.nio.file.StandardOpenOption.WRITE;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
@@ -96,6 +100,7 @@
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
import org.apache.ratis.util.FileUtils;
import org.apache.ratis.util.JavaUtils;
import org.apache.ratis.util.LifeCycle;
import org.apache.ratis.util.TaskQueue;
@@ -327,7 +332,7 @@ private long loadSnapshot(SingleFileSnapshotInfo snapshot)
public void buildMissingContainerSet(File snapshotFile) throws IOException {
// initialize the dispatcher with snapshot so that it build the missing
// container list
try (FileInputStream fin = new FileInputStream(snapshotFile)) {
try (InputStream fin = Files.newInputStream(snapshotFile.toPath())) {
ContainerProtos.Container2BCSIDMapProto proto =
ContainerProtos.Container2BCSIDMapProto
.parseFrom(fin);
@@ -374,11 +379,9 @@ public long takeSnapshot() throws IOException {
final File snapshotFile =
storage.getSnapshotFile(ti.getTerm(), ti.getIndex());
LOG.info("{}: Taking a snapshot at:{} file {}", getGroupId(), ti, snapshotFile);
try (FileOutputStream fos = new FileOutputStream(snapshotFile)) {
try (OutputStream fos = FileUtils.newOutputStreamForceAtClose(snapshotFile, CREATE, TRUNCATE_EXISTING, WRITE)) {
persistContainerSet(fos);
fos.flush();
// make sure the snapshot file is synced
fos.getFD().sync();
} catch (IOException ioe) {
LOG.error("{}: Failed to write snapshot at:{} file {}", getGroupId(), ti,
snapshotFile);