From 81a525998f247d3dcc76f8cfea3518cd7b254cad Mon Sep 17 00:00:00 2001
From: Wellington Ramos Chevreuil
Date: Wed, 20 Mar 2024 16:04:38 +0000
Subject: [PATCH 1/2] Fixing lack of updates to blocksByHFile when reloading
 persistent cache
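
Before this change, parsePB() rebuilt backingMap from the persisted protobuf
but never repopulated blocksByHFile, so the per-file index stayed empty after
a cache reload and per-file operations such as evictBlocksByHfileName() could
not see the recovered blocks. fromPB() now rebuilds the map and the sorted
per-file index in a single pass over the persisted entries and hands both back
as a Pair, so parsePB() assigns backingMap and blocksByHFile from the same
source.

Below is a minimal, self-contained sketch of why the (hfileName, offset)
ordered index has to be repopulated; Key and BlocksByFileIndexSketch are
hypothetical stand-ins used only for illustration, not HBase classes:

    import java.util.Comparator;
    import java.util.NavigableSet;
    import java.util.concurrent.ConcurrentSkipListSet;

    public class BlocksByFileIndexSketch {
      // Hypothetical stand-in for BlockCacheKey: an (hfile name, offset) pair.
      record Key(String hfileName, long offset) {}

      public static void main(String[] args) {
        // Same ordering idea as blocksByHFile: group keys by file name, then by offset.
        NavigableSet<Key> blocksByHFile = new ConcurrentSkipListSet<>(
          Comparator.comparing(Key::hfileName).thenComparingLong(Key::offset));

        // Rebuilding the index while replaying persisted entries (the step that
        // was previously skipped on cache recovery).
        blocksByHFile.add(new Key("hfile-a", 0L));
        blocksByHFile.add(new Key("hfile-a", 8192L));
        blocksByHFile.add(new Key("hfile-b", 0L));

        // Per-file lookups are range scans over this sorted set; if the index is
        // left empty after recovery, nothing is found for the file.
        NavigableSet<Key> blocksOfFileA = blocksByHFile.subSet(
          new Key("hfile-a", Long.MIN_VALUE), true,
          new Key("hfile-a", Long.MAX_VALUE), true);
        System.out.println(blocksOfFileA.size()); // prints 2
      }
    }

The comparator shape in the sketch mirrors the one fromPB() builds for
resultSet below, so the recovered index orders keys the same way as the one
maintained by the live cache.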

Change-Id: I7f0253c119bc1687804d50463bdb1d9ca2c40875
---
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java | 15 ++++++---------
 .../hbase/io/hfile/bucket/BucketProtoUtils.java   | 11 +++++++++--
 2 files changed, 15 insertions(+), 11 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 57f71b31894e..13a03a0258e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -222,13 +222,8 @@ public class BucketCache implements BlockCache, HeapSize {
    */
   transient final IdReadWriteLock<Long> offsetLock;
 
-  final NavigableSet<BlockCacheKey> blocksByHFile = new ConcurrentSkipListSet<>((a, b) -> {
-    int nameComparison = a.getHfileName().compareTo(b.getHfileName());
-    if (nameComparison != 0) {
-      return nameComparison;
-    }
-    return Long.compare(a.getOffset(), b.getOffset());
-  });
+  NavigableSet<BlockCacheKey> blocksByHFile = new ConcurrentSkipListSet<>(
+    Comparator.comparing(BlockCacheKey::getHfileName).thenComparingLong(BlockCacheKey::getOffset));
 
   /** Statistics thread schedule pool (for heavy debugging, could remove) */
   private transient final ScheduledExecutorService scheduleThreadPool =
@@ -1471,8 +1466,10 @@ private void verifyCapacityAndClasses(long capacitySize, String ioclass, String
   }
 
   private void parsePB(BucketCacheProtos.BucketCacheEntry proto) throws IOException {
-    backingMap = BucketProtoUtils.fromPB(proto.getDeserializersMap(), proto.getBackingMap(),
-      this::createRecycler);
+    Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> pair =
+      BucketProtoUtils.fromPB(proto.getDeserializersMap(), proto.getBackingMap(), this::createRecycler);
+    backingMap = pair.getFirst();
+    blocksByHFile = pair.getSecond();
     fullyCachedFiles.clear();
     fullyCachedFiles.putAll(BucketProtoUtils.fromPB(proto.getCachedFilesMap()));
     if (proto.hasChecksum()) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
index 7cc5050506e4..af168037dbc8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.IOException;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.NavigableSet;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentSkipListSet;
 import java.util.function.Function;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator.Recycler;
@@ -121,10 +124,13 @@ private static BucketCacheProtos.BlockPriority toPB(BlockPriority p) {
     }
   }
 
-  static ConcurrentHashMap<BlockCacheKey, BucketEntry> fromPB(Map<Integer, String> deserializers,
+  static Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> fromPB(Map<Integer, String> deserializers,
     BucketCacheProtos.BackingMap backingMap, Function<BucketEntry, Recycler> createRecycler)
     throws IOException {
     ConcurrentHashMap<BlockCacheKey, BucketEntry> result = new ConcurrentHashMap<>();
+    NavigableSet<BlockCacheKey> resultSet =
+      new ConcurrentSkipListSet<>(Comparator.comparing(BlockCacheKey::getHfileName).
+        thenComparingLong(BlockCacheKey::getOffset));
     for (BucketCacheProtos.BackingMapEntry entry : backingMap.getEntryList()) {
       BucketCacheProtos.BlockCacheKey protoKey = entry.getKey();
       BlockCacheKey key = new BlockCacheKey(protoKey.getHfilename(), protoKey.getOffset(),
@@ -153,8 +159,9 @@ static ConcurrentHashMap<BlockCacheKey, BucketEntry> fromPB(Map<Integer, String>
         throw new IOException("Unknown deserializer class found: " + deserializerClass);
       }
       result.put(key, value);
+      resultSet.add(key);
     }
-    return result;
+    return new Pair<>(result, resultSet);
   }
 
   private static BlockType fromPb(BucketCacheProtos.BlockType blockType) {

From a9de3561851622617cafb1667187e9ddd854c490 Mon Sep 17 00:00:00 2001
From: Wellington Ramos Chevreuil
Date: Wed, 20 Mar 2024 20:40:14 +0000
Subject: [PATCH 2/2] HBASE-28450 BucketCache.evictBlocksByHfileName won't
 work after a cache recovery from file
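
This follow-up re-wraps the lines introduced by the previous commit to fit the
project line-length rules (formatting only) and adds a regression test: it
caches four blocks, persists the cache to a file, recovers a new BucketCache
instance from that file, and then verifies that evicting by hfile name shrinks
the recovered backing map from four entries to three.

A compact sketch of the behaviour the new test pins down, again with
hypothetical stand-in types (Key, RecoveredCacheSketch) rather than the real
BucketCache API:

    import java.util.Comparator;
    import java.util.List;
    import java.util.NavigableSet;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentSkipListSet;

    public class RecoveredCacheSketch {
      // Hypothetical stand-in for BlockCacheKey.
      record Key(String hfileName, long offset) {}

      final ConcurrentHashMap<Key, String> backingMap = new ConcurrentHashMap<>();
      final NavigableSet<Key> blocksByHFile = new ConcurrentSkipListSet<>(
        Comparator.comparing(Key::hfileName).thenComparingLong(Key::offset));

      // "Recovery": both views are rebuilt from the same persisted keys,
      // mirroring the Pair that fromPB() now returns.
      void recover(List<Key> persistedKeys) {
        for (Key key : persistedKeys) {
          backingMap.put(key, "bucket-entry");
          blocksByHFile.add(key);
        }
      }

      // Evict-by-file: find the file's keys through the index, then drop them
      // from both views. With an empty index this would evict nothing.
      int evictBlocksByHfileName(String hfileName) {
        int evicted = 0;
        for (Key key : blocksByHFile.subSet(new Key(hfileName, Long.MIN_VALUE), true,
          new Key(hfileName, Long.MAX_VALUE), true)) {
          if (backingMap.remove(key) != null) {
            blocksByHFile.remove(key);
            evicted++;
          }
        }
        return evicted;
      }

      public static void main(String[] args) {
        RecoveredCacheSketch cache = new RecoveredCacheSketch();
        cache.recover(List.of(new Key("hfile-a", 0L), new Key("hfile-b", 0L),
          new Key("hfile-c", 0L), new Key("hfile-d", 0L)));
        System.out.println(cache.backingMap.size());                  // prints 4
        System.out.println(cache.evictBlocksByHfileName("hfile-a"));  // prints 1
        System.out.println(cache.backingMap.size());                  // prints 3
      }
    }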

Change-Id: I338a5af498a6914fbdaa79ad6978820ec204833d
---
 .../hbase/io/hfile/bucket/BucketCache.java      |  3 +-
 .../io/hfile/bucket/BucketProtoUtils.java       | 11 +++----
 .../TestRecoveryPersistentBucketCache.java      | 33 +++++++++++++++++++
 3 files changed, 40 insertions(+), 7 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index 13a03a0258e4..912a3ab524fe 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -1467,7 +1467,8 @@ private void verifyCapacityAndClasses(long capacitySize, String ioclass, String
 
   private void parsePB(BucketCacheProtos.BucketCacheEntry proto) throws IOException {
     Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> pair =
-      BucketProtoUtils.fromPB(proto.getDeserializersMap(), proto.getBackingMap(), this::createRecycler);
+      BucketProtoUtils.fromPB(proto.getDeserializersMap(), proto.getBackingMap(),
+        this::createRecycler);
     backingMap = pair.getFirst();
     blocksByHFile = pair.getSecond();
     fullyCachedFiles.clear();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
index af168037dbc8..4b42414fb9c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketProtoUtils.java
@@ -124,13 +124,12 @@ private static BucketCacheProtos.BlockPriority toPB(BlockPriority p) {
     }
   }
 
-  static Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> fromPB(Map<Integer, String> deserializers,
-    BucketCacheProtos.BackingMap backingMap, Function<BucketEntry, Recycler> createRecycler)
-    throws IOException {
+  static Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> fromPB(
+    Map<Integer, String> deserializers, BucketCacheProtos.BackingMap backingMap,
+    Function<BucketEntry, Recycler> createRecycler) throws IOException {
     ConcurrentHashMap<BlockCacheKey, BucketEntry> result = new ConcurrentHashMap<>();
-    NavigableSet<BlockCacheKey> resultSet =
-      new ConcurrentSkipListSet<>(Comparator.comparing(BlockCacheKey::getHfileName).
-        thenComparingLong(BlockCacheKey::getOffset));
+    NavigableSet<BlockCacheKey> resultSet = new ConcurrentSkipListSet<>(Comparator
+      .comparing(BlockCacheKey::getHfileName).thenComparingLong(BlockCacheKey::getOffset));
     for (BucketCacheProtos.BackingMapEntry entry : backingMap.getEntryList()) {
       BucketCacheProtos.BlockCacheKey protoKey = entry.getKey();
       BlockCacheKey key = new BlockCacheKey(protoKey.getHfilename(), protoKey.getOffset(),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
index ad91d01f8cfd..63ff334826d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestRecoveryPersistentBucketCache.java
@@ -108,6 +108,39 @@ public void testBucketCacheRecovery() throws Exception {
     TEST_UTIL.cleanupTestDir();
   }
 
+  @Test
+  public void testBucketCacheEvictByHFileAfterRecovery() throws Exception {
+    HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
+    Path testDir = TEST_UTIL.getDataTestDir();
+    TEST_UTIL.getTestFileSystem().mkdirs(testDir);
+    Configuration conf = HBaseConfiguration.create();
+    // Disables the persister thread by setting its interval to MAX_VALUE
+    conf.setLong(BUCKETCACHE_PERSIST_INTERVAL_KEY, Long.MAX_VALUE);
+    int[] bucketSizes = new int[] { 8 * 1024 + 1024 };
+    BucketCache bucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
+      8192, bucketSizes, writeThreads, writerQLen, testDir + "/bucket.persistence",
+      DEFAULT_ERROR_TOLERATION_DURATION, conf);
+
+    CacheTestUtils.HFileBlockPair[] blocks = CacheTestUtils.generateHFileBlocks(8192, 4);
+
+    // Add four blocks
+    cacheAndWaitUntilFlushedToBucket(bucketCache, blocks[0].getBlockName(), blocks[0].getBlock());
+    cacheAndWaitUntilFlushedToBucket(bucketCache, blocks[1].getBlockName(), blocks[1].getBlock());
+    cacheAndWaitUntilFlushedToBucket(bucketCache, blocks[2].getBlockName(), blocks[2].getBlock());
+    cacheAndWaitUntilFlushedToBucket(bucketCache, blocks[3].getBlockName(), blocks[3].getBlock());
+    // saves the current state of the cache
+    bucketCache.persistToFile();
+
+    BucketCache newBucketCache = new BucketCache("file:" + testDir + "/bucket.cache", capacitySize,
+      8192, bucketSizes, writeThreads, writerQLen, testDir + "/bucket.persistence",
+      DEFAULT_ERROR_TOLERATION_DURATION, conf);
+    Thread.sleep(100);
+    assertEquals(4, newBucketCache.backingMap.size());
+    newBucketCache.evictBlocksByHfileName(blocks[0].getBlockName().getHfileName());
+    assertEquals(3, newBucketCache.backingMap.size());
+    TEST_UTIL.cleanupTestDir();
+  }
+
   private void waitUntilFlushedToBucket(BucketCache cache, BlockCacheKey cacheKey)
     throws InterruptedException {
     while (!cache.backingMap.containsKey(cacheKey) || cache.ramCache.containsKey(cacheKey)) {