Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,15 @@

package org.apache.hadoop.hdds.server;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.ratis.util.AtomicFileOutputStream;
import org.slf4j.Logger;
import org.yaml.snakeyaml.LoaderOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.inspector.TagInspector;
Expand Down Expand Up @@ -46,4 +53,14 @@ private static Yaml getYamlForLoad() {
loaderOptions.setTagInspector(tags);
return new Yaml(loaderOptions);
}

  /**
   * Serializes {@code data} as YAML (UTF-8) and writes it to {@code file} via
   * {@link AtomicFileOutputStream}, so readers never observe a half-written
   * file on a successful run. On failure the exception is logged and rethrown.
   *
   * @param yaml the configured SnakeYAML serializer to use
   * @param data the object to dump
   * @param file the destination file
   * @param log  caller's logger, used to record the failure with context
   * @throws IOException if writing (or closing) the output fails
   */
  public static void dump(Yaml yaml, Object data, File file, Logger log) throws IOException {
    try (OutputStream out = new AtomicFileOutputStream(file);
        OutputStreamWriter writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
      yaml.dump(data, writer);
    } catch (IOException e) {
      // NOTE(review): try-with-resources closes (and may atomically commit) the
      // stream before this catch runs; whether a failed dump still replaces the
      // destination with partial content depends on AtomicFileOutputStream's
      // close-after-error behavior — TODO confirm (an explicit abort() on the
      // failure path may be needed).
      log.warn("Failed to dump {}", data, e);
      throw e;
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,7 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.LinkedHashMap;
import java.util.List;
Expand Down Expand Up @@ -71,10 +68,8 @@ public static void createDatanodeIdFile(DatanodeDetails datanodeDetails,
options.setDefaultFlowStyle(DumperOptions.FlowStyle.FLOW);
Yaml yaml = new Yaml(options);

try (Writer writer = new OutputStreamWriter(
Files.newOutputStream(path.toPath()), StandardCharsets.UTF_8)) {
yaml.dump(getDatanodeDetailsYaml(datanodeDetails, conf), writer);
}
final DatanodeDetailsYaml data = getDatanodeDetailsYaml(datanodeDetails, conf);
YamlUtils.dump(yaml, data, path, LOG);
}

/**
Expand Down Expand Up @@ -228,6 +223,11 @@ public int getCurrentVersion() {
  /**
   * Sets {@code currentVersion}.
   * Bean-style setter — presumably required so SnakeYAML can populate this
   * field during deserialization; verify against the YAML mapping.
   */
  public void setCurrentVersion(int version) {
    this.currentVersion = version;
  }

@Override
public String toString() {
return "DatanodeDetailsYaml(" + uuid + ", " + hostName + "/" + ipAddress + ")";
}
}

private static DatanodeDetailsYaml getDatanodeDetailsYaml(
Expand Down Expand Up @@ -268,7 +268,7 @@ private static DatanodeDetailsYaml getDatanodeDetailsYaml(
}

return new DatanodeDetailsYaml(
datanodeDetails.getUuid().toString(),
datanodeDetails.getUuidString(),
datanodeDetails.getIpAddress(),
datanodeDetails.getHostName(),
datanodeDetails.getCertSerialId(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,9 @@ public abstract class ContainerData {
private boolean committedSpace;

//ID of the pipeline where this container is created
private String originPipelineId;
private final String originPipelineId;
//ID of the datanode where this container is created
private String originNodeId;
private final String originNodeId;

/** parameters for read/write statistics on the container. **/
private final AtomicLong readBytes;
Expand Down Expand Up @@ -667,4 +667,12 @@ public void updateWriteStats(long bytesWritten, boolean overwrite) {
incrWriteBytes(bytesWritten);
}

@Override
public String toString() {
return getClass().getSimpleName() + " #" + containerID
+ " (" + state
+ ", " + (isEmpty ? "empty" : "non-empty")
+ ", ri=" + replicaIndex
+ ", origin=[dn_" + originNodeId + ", pipeline_" + originPipelineId + "])";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,6 @@

package org.apache.hadoop.ozone.container.common.impl;

import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.nio.file.StandardOpenOption.WRITE;
import static org.apache.hadoop.ozone.OzoneConsts.REPLICA_INDEX;
import static org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData.KEYVALUE_YAML_TAG;

Expand All @@ -28,10 +25,6 @@
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
Expand All @@ -41,10 +34,9 @@
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerType;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.hdds.server.YamlUtils;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.apache.ratis.util.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
Expand Down Expand Up @@ -77,41 +69,15 @@ private ContainerDataYaml() {

/**
* Creates a .container file in yaml format.
*
* @param containerFile
* @param containerData
* @throws IOException
*/
public static void createContainerFile(ContainerType containerType,
ContainerData containerData, File containerFile) throws IOException {
Writer writer = null;
OutputStream out = null;
try {
boolean withReplicaIndex =
containerData instanceof KeyValueContainerData &&
((KeyValueContainerData) containerData).getReplicaIndex() > 0;

// Create Yaml for given container type
Yaml yaml = getYamlForContainerType(containerType, withReplicaIndex);
// Compute Checksum and update ContainerData
containerData.computeAndSetChecksum(yaml);

// Write the ContainerData with checksum to Yaml file.
out = FileUtils.newOutputStreamForceAtClose(containerFile, CREATE, TRUNCATE_EXISTING, WRITE);
writer = new OutputStreamWriter(out, StandardCharsets.UTF_8);
yaml.dump(containerData, writer);
} finally {
try {
if (writer != null) {
writer.flush();
writer.close();
}
} catch (IOException ex) {
LOG.warn("Error occurred during closing the writer. ContainerID: " +
containerData.getContainerID());
}
IOUtils.closeQuietly(out);
}
public static void createContainerFile(ContainerData containerData, File containerFile) throws IOException {
// Create Yaml for given container type
final Yaml yaml = getYamlForContainerType(containerData.getContainerType(), containerData.getReplicaIndex() > 0);
// Compute Checksum and update ContainerData
containerData.computeAndSetChecksum(yaml);

// Write the ContainerData with checksum to Yaml file.
YamlUtils.dump(yaml, containerData, containerFile, LOG);
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -294,8 +294,7 @@ private void writeToContainerFile(File containerFile, boolean isCreate)
long containerId = containerData.getContainerID();
try {
tempContainerFile = createTempFile(containerFile);
ContainerDataYaml.createContainerFile(
ContainerType.KeyValueContainer, containerData, tempContainerFile);
ContainerDataYaml.createContainerFile(containerData, tempContainerFile);

// NativeIO.renameTo is an atomic function. But it might fail if the
// container file already exists. Hence, we handle the two cases
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,8 @@ void testWriteReadBeforeRatisDatastreamPortLayoutVersion(@TempDir File dir)
void testWriteReadAfterRatisDatastreamPortLayoutVersion(@TempDir File dir)
throws IOException {
DatanodeDetails original = MockDatanodeDetails.randomDatanodeDetails();
assertEquals(original.getUuid().toString(), original.getUuidString());

File file = new File(dir, "datanode.yaml");
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, dir.toString());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,8 +89,7 @@ private File createContainerFile(long containerID, int replicaIndex)
File containerFile = new File(testRoot, containerPath);

// Create .container file with ContainerData
ContainerDataYaml.createContainerFile(ContainerProtos.ContainerType
.KeyValueContainer, keyValueContainerData, containerFile);
ContainerDataYaml.createContainerFile(keyValueContainerData, containerFile);

//Check .container file exists or not.
assertTrue(containerFile.exists());
Expand Down Expand Up @@ -140,8 +139,7 @@ public void testCreateContainerFile(ContainerLayoutVersion layout)
kvData.setState(ContainerProtos.ContainerDataProto.State.CLOSED);


ContainerDataYaml.createContainerFile(ContainerProtos.ContainerType
.KeyValueContainer, kvData, containerFile);
ContainerDataYaml.createContainerFile(kvData, containerFile);

// Reading newly updated data from .container file
kvData = (KeyValueContainerData) ContainerDataYaml.readContainerFile(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,9 +185,7 @@ public void testInconsistentChecksumContainerShouldThrowError() throws Exception
private File containerTarFile(
long containerId, ContainerData containerData) throws IOException {
File yamlFile = new File(tempDir, "container.yaml");
ContainerDataYaml.createContainerFile(
ContainerProtos.ContainerType.KeyValueContainer, containerData,
yamlFile);
ContainerDataYaml.createContainerFile(containerData, yamlFile);
File tarFile = new File(tempDir,
ContainerUtils.getContainerTarName(containerId));
try (OutputStream output = Files.newOutputStream(tarFile.toPath())) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -345,9 +345,7 @@ private void rewriteAndBackupContainerDataFile(ContainerData containerData,
result.setBackupContainerFilePath(bakFile.getAbsolutePath());

// gen new v3 container data file
ContainerDataYaml.createContainerFile(
ContainerProtos.ContainerType.KeyValueContainer,
copyContainerData, originContainerFile);
ContainerDataYaml.createContainerFile(copyContainerData, originContainerFile);

result.setNewContainerData(copyContainerData);
result.setNewContainerFilePath(originContainerFile.getAbsolutePath());
Expand Down