From c1d94aad78dee2bd0bd7ec18414ae91d1dd622ba Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Tue, 20 May 2025 11:00:33 +0530
Subject: [PATCH 1/7] HDDS-12926. remove *.tmp.* exclusion in DU

---
 .../src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java
index a89e914398c9..266244763cea 100644
--- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java
+++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/fs/DUFactory.java
@@ -31,7 +31,6 @@
 public class DUFactory implements SpaceUsageCheckFactory {

   private static final String DU_CACHE_FILE = "scmUsed";
-  private static final String EXCLUDE_PATTERN = "*.tmp.*";

   private Conf conf;

@@ -46,7 +45,7 @@ public SpaceUsageCheckFactory setConfiguration(
   public SpaceUsageCheckParams paramsFor(File dir) {
     Duration refreshPeriod = conf.getRefreshPeriod();

-    SpaceUsageSource source = new DU(dir, EXCLUDE_PATTERN);
+    SpaceUsageSource source = new DU(dir, null);

     SpaceUsagePersistence persistence = new SaveSpaceUsageToFile(
         new File(dir, DU_CACHE_FILE), refreshPeriod);

From cec3094d0f573ee01693559ece3f25a7757be755 Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Tue, 17 Jun 2025 12:25:38 +0530
Subject: [PATCH 2/7] HDDS-13286. write fail for volume full during stream write

---
 .../impl/KeyValueStreamDataChannel.java       |  1 +
 .../keyvalue/impl/StreamDataChannelBase.java  |  9 +++++++
 .../impl/TestKeyValueStreamDataChannel.java   | 27 +++++++++++++++++++
 3 files changed, 37 insertions(+)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
index fe4bbd1478c2..37ffae62c303 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
@@ -59,6 +59,7 @@ public int write(ReferenceCountedObject referenceCounted) throws IOException {
     getMetrics().incContainerOpsMetrics(getType());

     assertOpen();
+    assertSpaceAvailability();
     return writeBuffers(referenceCounted, buffers, this::writeFileChannel);
   }

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
index b1adfbcac5d2..548bc63cabc6 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
@@ -17,6 +17,7 @@

 package org.apache.hadoop.ozone.container.keyvalue.impl;

+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.DISK_OUT_OF_SPACE;
 import static org.apache.hadoop.ozone.container.common.utils.StorageVolumeUtil.onFailure;

 import java.io.File;
@@ -93,6 +94,14 @@ public final boolean isOpen() {
     return getChannel().isOpen();
   }

+  protected void assertSpaceAvailability() throws IOException {
+    if (containerData.getVolume().isVolumeFull()) {
+      throw new StorageContainerException("write failed for container " + containerData.getContainerID()
+          + " due to volume " + containerData.getVolume().getStorageID() + " out of space "
+          + containerData.getVolume().getCurrentUsage(), DISK_OUT_OF_SPACE);
+    }
+  }
+
   public void setLinked() {
     linked.set(true);
   }
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
index e4c7decefd16..89ddbfe0976a 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
@@ -24,8 +24,12 @@
 import static org.apache.hadoop.ozone.container.keyvalue.impl.KeyValueStreamDataChannel.writeFully;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;

+import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.WritableByteChannel;
@@ -38,6 +42,7 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadLocalRandom;
 import org.apache.commons.lang3.RandomUtils;
+import org.apache.hadoop.hdds.fs.SpaceUsageSource;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.BlockData;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.DatanodeBlockID;
@@ -48,6 +53,9 @@
 import org.apache.hadoop.hdds.ratis.RatisHelper;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.ClientVersion;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerMetrics;
+import org.apache.hadoop.ozone.container.common.impl.ContainerData;
+import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.ozone.container.keyvalue.impl.KeyValueStreamDataChannel.WriteMethod;
 import org.apache.ratis.client.api.DataStreamOutput;
 import org.apache.ratis.io.FilePositionCount;
@@ -151,6 +159,25 @@ private static ContainerCommandRequestProto readPutBlockRequest(ByteBuffer b)
     return request;
   }

+  @Test
+  public void testVolumeFullCase() throws Exception {
+    File tempFile = File.createTempFile("test-kv-stream", ".tmp");
+    HddsVolume mockVolume = mock(HddsVolume.class);
+    when(mockVolume.getStorageID()).thenReturn("storageId");
+    when(mockVolume.isVolumeFull()).thenReturn(true);
+    when(mockVolume.getCurrentUsage()).thenReturn(new SpaceUsageSource.Fixed(100L, 0L, 100L));
+    ContainerData mockContainerData = mock(ContainerData.class);
+    when(mockContainerData.getContainerID()).thenReturn(123L);
+    when(mockContainerData.getVolume()).thenReturn(mockVolume);
+    ContainerMetrics mockMetrics = mock(ContainerMetrics.class);
+    KeyValueStreamDataChannel writeChannel = new KeyValueStreamDataChannel(tempFile, mockContainerData, mockMetrics);
+    assertThrows(StorageContainerException.class, writeChannel::assertSpaceAvailability);
+    final ByteBuffer putBlockBuf = ContainerCommandRequestMessage.toMessage(
+        PUT_BLOCK_PROTO, null).getContent().asReadOnlyByteBuffer();
+    ReferenceCountedObject wrap = ReferenceCountedObject.wrap(putBlockBuf);
+    assertThrows(StorageContainerException.class, () -> writeChannel.write(wrap));
+  }
+
   @Test
   public void testBuffers() throws Exception {
     final ExecutorService executor = Executors.newFixedThreadPool(32);

From d64d590f1e87162a0912de1cd9cbec49ffa8f82f Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Wed, 18 Jun 2025 14:06:25 +0530
Subject: [PATCH 3/7] review fix

---
 .../keyvalue/impl/KeyValueStreamDataChannel.java |  2 +-
 .../keyvalue/impl/StreamDataChannelBase.java     | 13 +++++++++----
 .../impl/TestKeyValueStreamDataChannel.java      |  3 ++-
 3 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
index 37ffae62c303..3218fb4f88d9 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/KeyValueStreamDataChannel.java
@@ -59,7 +59,7 @@ public int write(ReferenceCountedObject referenceCounted) throws IOException {
     getMetrics().incContainerOpsMetrics(getType());

     assertOpen();
-    assertSpaceAvailability();
+    assertSpaceAvailability(referenceCounted.get().remaining());
     return writeBuffers(referenceCounted, buffers, this::writeFileChannel);
   }

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
index 548bc63cabc6..3d8dead0472b 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
@@ -27,10 +27,12 @@
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.hadoop.hdds.fs.SpaceUsageSource;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerMetrics;
 import org.apache.hadoop.ozone.container.common.impl.ContainerData;
+import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.util.Time;
 import org.apache.ratis.statemachine.StateMachine;
 import org.slf4j.Logger;
@@ -94,11 +96,14 @@ public final boolean isOpen() {
     return getChannel().isOpen();
   }

-  protected void assertSpaceAvailability() throws IOException {
-    if (containerData.getVolume().isVolumeFull()) {
+  protected void assertSpaceAvailability(int remaining) throws IOException {
+    HddsVolume volume = containerData.getVolume();
+    SpaceUsageSource currentUsage = volume.getCurrentUsage();
+    if (currentUsage.getAvailable() - volume.getFreeSpaceToSpare(currentUsage.getCapacity()) <= remaining) {
       throw new StorageContainerException("write failed for container " + containerData.getContainerID()
-          + " due to volume " + containerData.getVolume().getStorageID() + " out of space "
-          + containerData.getVolume().getCurrentUsage(), DISK_OUT_OF_SPACE);
+          + " with size " + remaining + " due to volume " + volume.getStorageID() + " out of space "
+          + volume.getCurrentUsage() + " with minimum free space required: "
+          + volume.getFreeSpaceToSpare(currentUsage.getCapacity()), DISK_OUT_OF_SPACE);
     }
   }

diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
index 89ddbfe0976a..5e6311ff98fb 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
@@ -171,7 +171,8 @@ public void testVolumeFullCase() throws Exception {
     when(mockContainerData.getVolume()).thenReturn(mockVolume);
     ContainerMetrics mockMetrics = mock(ContainerMetrics.class);
     KeyValueStreamDataChannel writeChannel = new KeyValueStreamDataChannel(tempFile, mockContainerData, mockMetrics);
-    assertThrows(StorageContainerException.class, writeChannel::assertSpaceAvailability);
+    assertThrows(StorageContainerException.class,
+        () -> writeChannel.assertSpaceAvailability(1));
     final ByteBuffer putBlockBuf = ContainerCommandRequestMessage.toMessage(
         PUT_BLOCK_PROTO, null).getContent().asReadOnlyByteBuffer();
     ReferenceCountedObject wrap = ReferenceCountedObject.wrap(putBlockBuf);

From fc903d26cee248bdbb6d575852088d9a2af95566 Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Wed, 18 Jun 2025 14:13:26 +0530
Subject: [PATCH 4/7] fix test case

---
 .../container/keyvalue/impl/TestKeyValueStreamDataChannel.java | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
index 5e6311ff98fb..8e87878fcf60 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/impl/TestKeyValueStreamDataChannel.java
@@ -162,6 +162,7 @@ private static ContainerCommandRequestProto readPutBlockRequest(ByteBuffer b)
   @Test
   public void testVolumeFullCase() throws Exception {
     File tempFile = File.createTempFile("test-kv-stream", ".tmp");
+    tempFile.deleteOnExit();
     HddsVolume mockVolume = mock(HddsVolume.class);
     when(mockVolume.getStorageID()).thenReturn("storageId");
     when(mockVolume.isVolumeFull()).thenReturn(true);
@@ -176,7 +177,9 @@ public void testVolumeFullCase() throws Exception {
     final ByteBuffer putBlockBuf = ContainerCommandRequestMessage.toMessage(
         PUT_BLOCK_PROTO, null).getContent().asReadOnlyByteBuffer();
     ReferenceCountedObject wrap = ReferenceCountedObject.wrap(putBlockBuf);
+    wrap.retain();
     assertThrows(StorageContainerException.class, () -> writeChannel.write(wrap));
+    wrap.release();
   }

   @Test

From 39e82b5555e19bf33e4cd6c03d1de09b144af2f4 Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Thu, 19 Jun 2025 08:34:12 +0530
Subject: [PATCH 5/7] fix review comment

---
 .../keyvalue/impl/StreamDataChannelBase.java | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
index 3d8dead0472b..7c5bf1c3ab01 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
@@ -96,14 +96,15 @@ public final boolean isOpen() {
     return getChannel().isOpen();
   }

-  protected void assertSpaceAvailability(int remaining) throws IOException {
-    HddsVolume volume = containerData.getVolume();
-    SpaceUsageSource currentUsage = volume.getCurrentUsage();
-    if (currentUsage.getAvailable() - volume.getFreeSpaceToSpare(currentUsage.getCapacity()) <= remaining) {
-      throw new StorageContainerException("write failed for container " + containerData.getContainerID()
-          + " with size " + remaining + " due to volume " + volume.getStorageID() + " out of space "
-          + volume.getCurrentUsage() + " with minimum free space required: "
-          + volume.getFreeSpaceToSpare(currentUsage.getCapacity()), DISK_OUT_OF_SPACE);
+  protected void assertSpaceAvailability(int requested) throws StorageContainerException {
+    final HddsVolume volume = containerData.getVolume();
+    final SpaceUsageSource currentUsage = volume.getCurrentUsage();
+    final long spared = volume.getFreeSpaceToSpare(currentUsage.getCapacity());
+
+    if (currentUsage.getAvailable() - spared <= requested) {
+      throw new StorageContainerException("Failed to write " + requested + " bytes to container "
+          + containerData.getContainerID() + " due to volume " + volume.getStorageID() + " out of space "
+          + currentUsage + ", spared=" + spared, DISK_OUT_OF_SPACE);
     }
   }

From 0e6d464434ee9e599e4395909c7fc5a1a4c75d88 Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Thu, 19 Jun 2025 18:28:13 +0530
Subject: [PATCH 6/7] fix review comment

---
 .../container/common/helpers/ContainerUtils.java | 15 +++++++++++++++
 .../keyvalue/impl/StreamDataChannelBase.java     | 14 ++------------
 2 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index e5dec0846b48..c88dc13d8bb3 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -21,6 +21,7 @@
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.CLOSED_CONTAINER_IO;
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.CONTAINER_CHECKSUM_ERROR;
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.CONTAINER_NOT_OPEN;
+import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.DISK_OUT_OF_SPACE;
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.NO_SUCH_ALGORITHM;
 import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.UNABLE_TO_FIND_DATA_DIR;
 import static org.apache.hadoop.hdds.scm.protocolPB.ContainerCommandResponseBuilders.getContainerCommandResponse;
@@ -44,6 +45,7 @@
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.hdds.HddsConfigKeys;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
+import org.apache.hadoop.hdds.fs.SpaceUsageSource;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
@@ -55,6 +57,7 @@
 import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
 import org.apache.hadoop.ozone.container.common.interfaces.Container;
+import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -341,4 +344,16 @@ public static long getPendingDeletionBlocks(ContainerData containerData) {
           " not support.");
     }
   }
+
+  public static void assertSpaceAvailability(long containerId, HddsVolume volume, int sizeRequested)
+      throws StorageContainerException {
+    final SpaceUsageSource currentUsage = volume.getCurrentUsage();
+    final long spared = volume.getFreeSpaceToSpare(currentUsage.getCapacity());
+
+    if (currentUsage.getAvailable() - spared <= sizeRequested) {
+      throw new StorageContainerException("Failed to write " + sizeRequested + " bytes to container "
+          + containerId + " due to volume " + volume.getStorageID() + " out of space "
+          + currentUsage + ", minimum free space spared=" + spared, DISK_OUT_OF_SPACE);
+    }
+  }
 }
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
index 7c5bf1c3ab01..43bcea5e9bd9 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/keyvalue/impl/StreamDataChannelBase.java
@@ -17,7 +17,6 @@

 package org.apache.hadoop.ozone.container.keyvalue.impl;

-import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.DISK_OUT_OF_SPACE;
 import static org.apache.hadoop.ozone.container.common.utils.StorageVolumeUtil.onFailure;

 import java.io.File;
@@ -27,12 +26,11 @@
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.hadoop.hdds.fs.SpaceUsageSource;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerMetrics;
+import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.impl.ContainerData;
-import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.util.Time;
 import org.apache.ratis.statemachine.StateMachine;
 import org.slf4j.Logger;
@@ -97,15 +95,7 @@ public final boolean isOpen() {
   }

   protected void assertSpaceAvailability(int requested) throws StorageContainerException {
-    final HddsVolume volume = containerData.getVolume();
-    final SpaceUsageSource currentUsage = volume.getCurrentUsage();
-    final long spared = volume.getFreeSpaceToSpare(currentUsage.getCapacity());
-
-    if (currentUsage.getAvailable() - spared <= requested) {
-      throw new StorageContainerException("Failed to write " + requested + " bytes to container "
-          + containerData.getContainerID() + " due to volume " + volume.getStorageID() + " out of space "
-          + currentUsage + ", spared=" + spared, DISK_OUT_OF_SPACE);
-    }
+    ContainerUtils.assertSpaceAvailability(containerData.getContainerID(), containerData.getVolume(), requested);
   }

   public void setLinked() {

From 1e259acb1254ead5c36cbd23d1ca5d9adf28ce90 Mon Sep 17 00:00:00 2001
From: sumitagrawl
Date: Fri, 20 Jun 2025 10:13:53 +0530
Subject: [PATCH 7/7] review fix

---
 .../hadoop/ozone/container/common/helpers/ContainerUtils.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
index c88dc13d8bb3..c85f7cd9066c 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/common/helpers/ContainerUtils.java
@@ -350,7 +350,7 @@ public static void assertSpaceAvailability(long containerId, HddsVolume volume,
     final SpaceUsageSource currentUsage = volume.getCurrentUsage();
     final long spared = volume.getFreeSpaceToSpare(currentUsage.getCapacity());

-    if (currentUsage.getAvailable() - spared <= sizeRequested) {
+    if (currentUsage.getAvailable() - spared < sizeRequested) {
       throw new StorageContainerException("Failed to write " + sizeRequested + " bytes to container "
           + containerId + " due to volume " + volume.getStorageID() + " out of space "
           + currentUsage + ", minimum free space spared=" + spared, DISK_OUT_OF_SPACE);
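
For reference, below is a minimal, self-contained sketch of the space-availability rule these patches converge on in ContainerUtils.assertSpaceAvailability: a write of N bytes is allowed only when the volume's available space minus its reserved free space is at least N, and PATCH 7/7 relaxes the boundary case from <= to < so that a write exactly filling the remaining headroom is still accepted. The VolumeUsage class below is a hypothetical stand-in, not an Ozone type; it collapses HddsVolume.getCurrentUsage() and getFreeSpaceToSpare(capacity) into two plain long fields.

// Minimal standalone sketch of the availability rule (illustration only, not part of the patches).
final class VolumeUsage {
  final long available;        // bytes currently free on the volume
  final long freeSpaceToSpare; // bytes the volume must always keep free

  VolumeUsage(long available, long freeSpaceToSpare) {
    this.available = available;
    this.freeSpaceToSpare = freeSpaceToSpare;
  }

  // Mirrors the final check: fail only when (available - spared) < sizeRequested,
  // so a request that exactly fits the remaining headroom is accepted
  // (the strict '<' comes from PATCH 7/7, which relaxed the earlier '<=').
  boolean hasSpaceFor(int sizeRequested) {
    return available - freeSpaceToSpare >= sizeRequested;
  }

  public static void main(String[] args) {
    VolumeUsage usage = new VolumeUsage(10L, 5L);
    System.out.println(usage.hasSpaceFor(5)); // true: 10 - 5 >= 5
    System.out.println(usage.hasSpaceFor(6)); // false: would eat into the reserved free space
  }
}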