@@ -84,11 +84,13 @@ public byte[] unpackContainerData(Container<KeyValueContainerData> container,
if (name.startsWith(DB_DIR_NAME + "/")) {
Path destinationPath = dbRoot
.resolve(name.substring(DB_DIR_NAME.length() + 1));
extractEntry(archiveInput, size, dbRoot, destinationPath);
extractEntry(entry, archiveInput, size, dbRoot,
destinationPath);
} else if (name.startsWith(CHUNKS_DIR_NAME + "/")) {
Path destinationPath = chunksRoot
.resolve(name.substring(CHUNKS_DIR_NAME.length() + 1));
extractEntry(archiveInput, size, chunksRoot, destinationPath);
extractEntry(entry, archiveInput, size, chunksRoot,
destinationPath);
} else if (CONTAINER_FILE_NAME.equals(name)) {
//Don't do anything. Container file should be unpacked in a
//separated step by unpackContainerDescriptor call.
@@ -109,27 +111,32 @@ public byte[] unpackContainerData(Container<KeyValueContainerData> container,
}
}

private void extractEntry(InputStream input, long size,
Path ancestor, Path path) throws IOException {
private void extractEntry(ArchiveEntry entry, InputStream input, long size,
Path ancestor, Path path) throws IOException {
HddsUtils.validatePath(path, ancestor);
Path parent = path.getParent();
if (parent != null) {
Files.createDirectories(parent);
}

try (OutputStream fileOutput = new FileOutputStream(path.toFile());
OutputStream output = new BufferedOutputStream(fileOutput)) {
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize + 1];
long remaining = size;
while (remaining > 0) {
int len = (int) Math.min(remaining, bufferSize);
int read = input.read(buffer, 0, len);
if (read >= 0) {
remaining -= read;
output.write(buffer, 0, read);
} else {
remaining = 0;
if (entry.isDirectory()) {
Files.createDirectories(path);
} else {
Path parent = path.getParent();
if (parent != null) {
Files.createDirectories(parent);
}

try (OutputStream fileOutput = new FileOutputStream(path.toFile());
OutputStream output = new BufferedOutputStream(fileOutput)) {
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize + 1];
long remaining = size;
while (remaining > 0) {
int len = (int) Math.min(remaining, bufferSize);
int read = input.read(buffer, 0, len);
if (read >= 0) {
remaining -= read;
output.write(buffer, 0, read);
} else {
remaining = 0;
}
}
}
}
@@ -209,6 +216,12 @@ private byte[] readEntry(InputStream input, final long size)
private void includePath(Path dir, String subdir,
ArchiveOutputStream archiveOutput) throws IOException {

// Add a directory entry before adding files, in case the directory is
// empty.
ArchiveEntry entry = archiveOutput.createArchiveEntry(dir.toFile(), subdir);
archiveOutput.putArchiveEntry(entry);

// Add files in the directory.
try (Stream<Path> dirEntries = Files.list(dir)) {
Contributor:

I think the error is thrown here, so it still needs the CHUNK file to exist?

Reproduced by adding new File(data.getChunksPath()).delete(); after checking the state of the original container.

Contributor:

@symious the error is thrown because the chunk directory does not exist. Not having chunk files in the directory should not be a problem here.
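
A minimal, self-contained sketch of the failure mode discussed above (the path here is a hypothetical stand-in for data.getChunksPath(), not taken from the test): Files.list throws NoSuchFileException as soon as the directory being exported is missing, so the export fails regardless of whether any chunk files were ever present.

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.NoSuchFileException;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    public class MissingChunksDirExample {
      public static void main(String[] args) throws IOException {
        // Hypothetical path standing in for a container's chunks directory;
        // it has been deleted (or was never created).
        Path chunksDir = Paths.get("container1", "chunks");
        try (Stream<Path> entries = Files.list(chunksDir)) {
          entries.forEach(System.out::println);
        } catch (NoSuchFileException e) {
          // Mirrors the export failure: listing a missing chunks directory
          // fails immediately, before any chunk files are considered.
          System.out.println("Listing failed: " + e.getMessage());
        }
      }
    }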

for (Path path : dirEntries.collect(toList())) {
String entryName = subdir + "/" + path.getFileName();
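As an illustration of the directory-entry change above, here is a small stand-alone sketch (an assumption: it uses commons-compress's TarArchiveOutputStream directly, while TarContainerPacker works against the generic ArchiveOutputStream interface; the archive name and directory path are made up). It shows that an empty directory only survives a tar export/import round trip if an explicit directory entry is written for it.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;

    public class EmptyDirTarExample {
      public static void main(String[] args) throws IOException {
        // Hypothetical empty directory standing in for a container's chunks dir.
        File chunksDir = new File("container1/chunks");
        if (!chunksDir.isDirectory() && !chunksDir.mkdirs()) {
          throw new IOException("Could not create " + chunksDir);
        }
        try (TarArchiveOutputStream out =
                 new TarArchiveOutputStream(new FileOutputStream("container1.tar"))) {
          // Without this explicit entry, an empty directory contributes nothing
          // to the archive and is silently absent after import.
          ArchiveEntry dirEntry = out.createArchiveEntry(chunksDir, "chunks");
          out.putArchiveEntry(dirEntry);
          out.closeArchiveEntry();
          out.finish();
        }
      }
    }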
@@ -19,6 +19,7 @@

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
@@ -258,6 +259,12 @@ public static void parseKVContainerData(KeyValueContainerData kvContainerData,
initializeUsedBytesAndBlockCount(store, kvContainerData);
}

// If the container is missing a chunks directory, possibly due to the
// bug fixed by HDDS-6235, create it here.
File chunksDir = new File(kvContainerData.getChunksPath());
if (!chunksDir.exists()) {
Files.createDirectories(chunksDir.toPath());
}
// Run advanced container inspection/repair operations if specified on
// startup. If this method is called but not as a part of startup,
// The inspectors will be unloaded and this will be a no-op.
@@ -41,6 +41,7 @@
import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerUtil;
import org.apache.hadoop.ozone.container.metadata.AbstractDatanodeStore;
import org.apache.hadoop.ozone.container.metadata.DatanodeStore;
import org.apache.ozone.test.GenericTestUtils;
@@ -66,6 +67,8 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
@@ -74,6 +77,7 @@
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import static org.apache.hadoop.hdds.HddsConfigKeys.HDDS_DB_PROFILE;
import static org.apache.ratis.util.Preconditions.assertTrue;
@@ -154,6 +158,55 @@ public void testCreateContainer() throws Exception {
"DB does not exist");
}

/**
* Tests repair of containers affected by the bug reported in HDDS-6235.
*/
@Test
public void testMissingChunksDirCreated() throws Exception {
// Create an empty container and delete its chunks directory.
createContainer();
closeContainer();
// Sets the checksum.
populate(0);
KeyValueContainerData data = keyValueContainer.getContainerData();
File chunksDir = new File(data.getChunksPath());
Assert.assertTrue(chunksDir.delete());

// When the container is loaded, the missing chunks directory should
// be created.
KeyValueContainerUtil.parseKVContainerData(data, CONF);
Assert.assertTrue(chunksDir.exists());
}

@Test
public void testEmptyContainerImportExport() throws Exception {
createContainer();
closeContainer();

KeyValueContainerData data = keyValueContainer.getContainerData();

// Check state of original container.
checkContainerFilesPresent(data, 0);

//destination path
File exportTar = folder.newFile("exported.tar.gz");
TarContainerPacker packer = new TarContainerPacker();
//export the container
try (FileOutputStream fos = new FileOutputStream(exportTar)) {
keyValueContainer.exportContainerData(fos, packer);
}

keyValueContainer.delete();

// import container.
try (FileInputStream fis = new FileInputStream(exportTar)) {
keyValueContainer.importContainerData(fis, packer);
}

// Make sure empty chunks dir was unpacked.
checkContainerFilesPresent(data, 0);
}

@Test
public void testContainerImportExport() throws Exception {
long containerId = keyValueContainer.getContainerData().getContainerID();
@@ -244,6 +297,18 @@ public void testContainerImportExport() throws Exception {
}
}

private void checkContainerFilesPresent(KeyValueContainerData data,
long expectedNumFilesInChunksDir) throws IOException {
File chunksDir = new File(data.getChunksPath());
Assert.assertTrue(Files.isDirectory(chunksDir.toPath()));
try (Stream<Path> stream = Files.list(chunksDir.toPath())) {
Assert.assertEquals(expectedNumFilesInChunksDir, stream.count());
}
Assert.assertTrue(data.getDbFile().exists());
Assert.assertTrue(KeyValueContainer.getContainerFile(data.getMetadataPath(),
data.getContainerID()).exists());
}

/**
* Create the container on disk.
*/
@@ -22,7 +22,6 @@
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.common.interfaces.ContainerInspector;
@@ -37,8 +36,6 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.File;

/**
* Tests for {@link KeyValueContainerMetadataInspector}.
*/
@@ -107,39 +104,6 @@ public void testSystemPropertyAndReadOnly() {
System.clearProperty(KeyValueContainerMetadataInspector.SYSTEM_PROPERTY);
}

@Test
public void testMissingChunksDir() throws Exception {
// Create container with missing chunks dir.
// The metadata in the DB will not be set in this fake container.
KeyValueContainer container = createClosedContainer(0);
KeyValueContainerData containerData = container.getContainerData();
String chunksDirStr = containerData.getChunksPath();
File chunksDirFile = new File(chunksDirStr);
FileUtils.deleteDirectory(chunksDirFile);
Assert.assertFalse(chunksDirFile.exists());

// In inspect mode, missing chunks dir should be detected but not fixed.
JsonObject inspectJson = runInspectorAndGetReport(containerData,
KeyValueContainerMetadataInspector.Mode.INSPECT);
// The block count and used bytes should be null in this container, but
// because it has no block keys that should not be an error.
Assert.assertEquals(1,
inspectJson.getAsJsonArray("errors").size());
checkJsonErrorsReport(inspectJson, "chunksDirectory.present",
new JsonPrimitive(true), new JsonPrimitive(false), false);
Assert.assertFalse(chunksDirFile.exists());

// In repair mode, missing chunks dir should be detected and fixed.
JsonObject repairJson = runInspectorAndGetReport(containerData,
KeyValueContainerMetadataInspector.Mode.REPAIR);
Assert.assertEquals(1,
inspectJson.getAsJsonArray("errors").size());
checkJsonErrorsReport(repairJson, "chunksDirectory.present",
new JsonPrimitive(true), new JsonPrimitive(false), true);
Assert.assertTrue(chunksDirFile.exists());
Assert.assertTrue(chunksDirFile.isDirectory());
}

@Test
public void testIncorrectTotalsNoData() throws Exception {
int createBlocks = 0;