diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index 4d579c16af26..015e09e8ee18 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -2615,4 +2615,13 @@ List getLogEntries(Set serverNames, String logType, Server * Flush master local region */ void flushMasterStore() throws IOException; + + /** + * Clean Cache by evicting the blocks of files belonging to regions that are no longer served by + * the RegionServer. + * @param serverName ServerName + * @return A map of filename and number of blocks evicted. + * @throws IOException if a remote or network exception occurs + */ + Map uncacheStaleBlocks(ServerName serverName) throws IOException; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java index 690b6406fd3a..70f559bf8d08 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AdminOverAsyncAdmin.java @@ -1115,4 +1115,9 @@ public List getLogEntries(Set serverNames, String logType, public void flushMasterStore() throws IOException { get(admin.flushMasterStore()); } + + @Override + public Map uncacheStaleBlocks(ServerName serverName) throws IOException { + return get(admin.uncacheStaleBlocks(serverName)); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java index 960982f5e3f1..dce0bdcc18d9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java @@ -1837,4 +1837,12 @@ CompletableFuture> getLogEntries(Set 
serverNames, Str * Flush master local region */ CompletableFuture flushMasterStore(); + + /** + * Clean Cache by evicting the blocks of files belonging to regions that are no longer served by + * the RegionServer. + * @param serverName ServerName + * @return A map of filename and number of blocks evicted. + */ + CompletableFuture> uncacheStaleBlocks(ServerName serverName); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java index 5ee8a6ab8269..7c7f8617fddc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java @@ -990,4 +990,9 @@ public CompletableFuture> getLogEntries(Set serverNam public CompletableFuture flushMasterStore() { return wrap(rawAdmin.flushMasterStore()); } + + @Override + public CompletableFuture> uncacheStaleBlocks(ServerName serverName) { + return wrap(rawAdmin.uncacheStaleBlocks(serverName)); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java index ee1dfac16bd3..718d99ffbde0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java @@ -142,6 +142,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksResponse; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @@ -4453,4 +4455,15 @@ Void> call(controller, stub, request.build(), (s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null)) .call(); } + + @Override + public CompletableFuture> uncacheStaleBlocks(ServerName serverName) { + UncacheStaleBlocksRequest.Builder request = UncacheStaleBlocksRequest.newBuilder(); + return this.> newAdminCaller() + .action((controller, stub) -> this.> adminCall(controller, stub, request.build(), + (s, c, req, done) -> s.uncacheStaleBlocks(c, req, done), + resp -> resp.getUncachedFilesMap())) + .serverName(serverName).call(); + } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java index aa3cb39c5971..3ec9259bc150 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java @@ -163,6 +163,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; @@ -3767,4 +3769,20 @@ public static T 
parseDelimitedFrom(InputStream in, Parser return parser.parseFrom(bytes); } } + + /** + * Clean Cache by evicting the blocks of files belonging to regions that are no longer served by + * the RegionServer. + */ + public static Map uncacheStaleBlocks(final RpcController controller, + final AdminService.BlockingInterface admin) throws IOException { + UncacheStaleBlocksRequest request = UncacheStaleBlocksRequest.newBuilder().build(); + UncacheStaleBlocksResponse response = null; + try { + response = admin.uncacheStaleBlocks(controller, request); + } catch (ServiceException se) { + throw getRemoteException(se); + } + return response.getUncachedFilesMap(); + } } diff --git a/hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto b/hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto index cd88a0ca7cdb..337d2e7168ac 100644 --- a/hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto +++ b/hbase-protocol-shaded/src/main/protobuf/server/region/Admin.proto @@ -329,6 +329,13 @@ message ClearSlowLogResponses { required bool is_cleaned = 1; } +message UncacheStaleBlocksRequest { +} + +message UncacheStaleBlocksResponse { + map uncached_files = 1; +} + service AdminService { rpc GetRegionInfo(GetRegionInfoRequest) returns(GetRegionInfoResponse); @@ -405,4 +412,7 @@ service AdminService { rpc GetLogEntries(LogRequest) returns(LogEntry); + rpc UncacheStaleBlocks(UncacheStaleBlocksRequest) + returns(UncacheStaleBlocksResponse); + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java index 4e795ec75e75..7fc6d71f51fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java @@ -18,6 +18,9 @@ package org.apache.hadoop.hbase.io.hfile; import java.util.Iterator; +import java.util.Map; +import java.util.Optional; +import 
org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.yetus.audience.InterfaceAudience; /** @@ -161,4 +164,14 @@ default Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repe default boolean isMetaBlock(BlockType blockType) { return blockType != null && blockType.getCategory() != BlockType.BlockCategory.DATA; } + + /** + * Clean Cache by evicting the blocks of files belonging to regions that are no longer served by + * the RegionServer. + * @param server HRegionServer + * @return A map of filename and number of blocks evicted. + */ + default Optional> uncacheStaleBlocks(HRegionServer server) { + return Optional.empty(); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java index d616d6f40d9f..c8f8772b0c32 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java @@ -17,9 +17,13 @@ */ package org.apache.hadoop.hbase.io.hfile; +import java.util.HashMap; import java.util.Iterator; +import java.util.Map; +import java.util.Optional; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; +import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.yetus.audience.InterfaceAudience; /** @@ -400,4 +404,13 @@ public FirstLevelBlockCache getFirstLevelCache() { public BlockCache getSecondLevelCache() { return l2Cache; } + + @Override + public Optional> uncacheStaleBlocks(HRegionServer server) { + Map uncachedStaleBlocksMap = + l1Cache.uncacheStaleBlocks(server).orElseGet(HashMap::new); + l2Cache.uncacheStaleBlocks(server).ifPresent( + map2 -> map2.forEach((key, value) -> uncachedStaleBlocksMap.merge(key, value, Integer::sum))); + return Optional.of(uncachedStaleBlocksMap); + } } diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java index 0faf510f5db6..dfd54015f4b4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java @@ -27,6 +27,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -55,6 +56,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseIOException; +import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.io.ByteBuffAllocator; @@ -75,6 +77,7 @@ import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.nio.RefCnt; import org.apache.hadoop.hbase.protobuf.ProtobufMagic; +import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.IdReadWriteLock; @@ -2002,4 +2005,27 @@ public void fileCacheCompleted(Path filePath, long size) { regionCachedSizeMap.merge(regionName, size, (oldpf, fileSize) -> oldpf + fileSize); } + @Override + public Optional> uncacheStaleBlocks(HRegionServer server) { + Map evictedFilesWithStaleBlocks = new HashMap<>(); + + fullyCachedFiles.forEach((fileName, value) -> { + int blocksEvicted; + try { + if (!server.getRegionByEncodedName(value.getFirst()).isAvailable()) { + blocksEvicted = evictBlocksByHfileName(fileName); + } else { + blocksEvicted = 0; + } + } catch (NotServingRegionException nsre) { + LOG.debug( + "Evicting blocks for file {} as the region {} is not served by the Region 
Server {} anymore.", + fileName, fullyCachedFiles.get(fileName).getFirst(), + server.getServerName().getServerName()); + blocksEvicted = evictBlocksByHfileName(fileName); + } + evictedFilesWithStaleBlocks.put(fileName, blocksEvicted); + }); + return Optional.of(evictedFilesWithStaleBlocks); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index b6a17d8503b2..f567a022c7fb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -182,6 +182,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest; @@ -3609,4 +3611,10 @@ public FlushTableResponse flushTable(RpcController controller, FlushTableRequest throw new ServiceException(ioe); } } + + @Override + public UncacheStaleBlocksResponse uncacheStaleBlocks(RpcController controller, + UncacheStaleBlocksRequest request) throws ServiceException { + throw new ServiceException(new DoNotRetryIOException("Unsupported method on master")); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 57efe505c126..dcaa096d498e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -190,6 +190,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksRequest; +import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UncacheStaleBlocksResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry; @@ -3933,4 +3935,15 @@ public void onConfigurationChange(Configuration conf) { super.onConfigurationChange(conf); setReloadableGuardrails(conf); } + + @Override + public UncacheStaleBlocksResponse uncacheStaleBlocks(RpcController controller, + UncacheStaleBlocksRequest request) throws ServiceException { + UncacheStaleBlocksResponse.Builder responseBuilder = UncacheStaleBlocksResponse.newBuilder(); + Map evictedFilesWithStaleBlocks = new HashMap<>(); + server.getBlockCache().flatMap(bc -> bc.uncacheStaleBlocks(server)) + .ifPresent(evictedFilesWithStaleBlocks::putAll); + responseBuilder.putAllUncachedFiles(evictedFilesWithStaleBlocks); + return responseBuilder.build(); + } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCleanBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCleanBucketCache.java new file mode 100644 index 000000000000..d8958cfb4b4b --- /dev/null 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.apache.hadoop.hbase.HConstants.BUCKET_CACHE_IOENGINE_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.SingleProcessHBaseCluster;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Verifies that {@code Admin#uncacheStaleBlocks} evicts BucketCache blocks of files whose
 * region has been closed on the RegionServer, and leaves the cache untouched before the call.
 */
@Category({ IOTests.class, MediumTests.class })
public class TestCleanBucketCache {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestCleanBucketCache.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

  private Configuration conf;
  Path testDir;
  MiniZooKeeperCluster zkCluster;
  SingleProcessHBaseCluster cluster;
  StartTestingClusterOption option =
    StartTestingClusterOption.builder().numRegionServers(2).build();

  @Before
  public void setup() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir();
    TEST_UTIL.getTestFileSystem().mkdirs(testDir);

    // Prefetch on open plus a file-backed, persistent BucketCache so flushed blocks land in L2.
    conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
    conf.set(BUCKET_CACHE_IOENGINE_KEY, "file:" + testDir + "/bucket.cache");
    conf.setInt("hbase.bucketcache.size", 400);
    conf.set("hbase.bucketcache.persistent.path", testDir + "/bucket.persistence");
    conf.setLong(CacheConfig.BUCKETCACHE_PERSIST_INTERVAL_KEY, 100);
    conf.setBoolean(CacheConfig.EVICT_BLOCKS_ON_CLOSE_KEY, true);
    zkCluster = TEST_UTIL.startMiniZKCluster();
    cluster = TEST_UTIL.startMiniHBaseCluster(option);
    // NOTE(review): setConf after the cluster has started likely has no effect on the already
    // running servers; kept from the original — confirm whether it can be dropped.
    cluster.setConf(conf);
  }

  @Test
  public void testCleanBucketCache() throws Exception {
    // Write to table and flush so blocks get cached.
    TableName tableRegionClose = writeDataToTable();

    // Find whichever of the two RegionServers actually hosts the table's single region.
    HRegionServer regionServingRS =
      cluster.getRegionServer(1).getRegions(tableRegionClose).size() == 1
        ? cluster.getRegionServer(1)
        : cluster.getRegionServer(0);

    assertTrue(regionServingRS.getBlockCache().isPresent());
    // Index [1] is the L2 (bucket) cache in the combined cache's array.
    long oldUsedCacheSize =
      regionServingRS.getBlockCache().get().getBlockCaches()[1].getCurrentSize();
    long blocksCount = regionServingRS.getBlockCache().get().getBlockCaches()[1].getBlockCount();
    assertNotEquals(0, blocksCount);

    // Close the region without evicting: cache content must be unchanged, i.e. now stale.
    HRegion r = regionServingRS.getRegions().get(0);
    r.close(false, false, true);
    long newUsedCacheSize =
      regionServingRS.getBlockCache().get().getBlockCaches()[1].getCurrentSize();
    assertEquals(oldUsedCacheSize, newUsedCacheSize);
    assertNotEquals(0, regionServingRS.getBlockCache().get().getBlockCaches()[1].getBlockCount());

    Admin admin = TEST_UTIL.getAdmin();
    Map<String, Integer> response = admin.uncacheStaleBlocks(regionServingRS.getServerName());

    long newCacheSize = regionServingRS.getBlockCache().get().getBlockCaches()[1].getCurrentSize();
    assertTrue(newCacheSize < newUsedCacheSize);
    // Single table / single HFile in this scenario, so the per-file eviction count should match
    // the total block count observed above. NOTE(review): this only holds with one cached file.
    response.values().forEach(value -> assertEquals(blocksCount, value.intValue()));
  }

  /**
   * Creates a one-region table, writes two rows and flushes, so at least one HFile is cached.
   * @return the created table's name
   */
  public TableName writeDataToTable() throws IOException, InterruptedException {
    TableName tableName = TableName.valueOf("table1");
    byte[] row0 = Bytes.toBytes("row1");
    byte[] row1 = Bytes.toBytes("row2");
    byte[] family = Bytes.toBytes("family");
    byte[] qf1 = Bytes.toBytes("qf1");
    byte[] qf2 = Bytes.toBytes("qf2");
    byte[] value1 = Bytes.toBytes("value1");
    byte[] value2 = Bytes.toBytes("value2");

    TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
    // try-with-resources: the original leaked the Table handle.
    try (Table table = TEST_UTIL.createTable(td, null)) {
      Put put0 = new Put(row0);
      put0.addColumn(family, qf1, 1, value1);
      table.put(put0);
      Put put1 = new Put(row1);
      put1.addColumn(family, qf2, 1, value2);
      table.put(put1);
      TEST_UTIL.flush(tableName);
    }
    // Give prefetch-on-open / cache persistence a moment to catch up after the flush.
    Thread.sleep(1000);
    assertEquals(1, cluster.getRegions(tableName).size());
    return tableName;
  }

  @After
  public void tearDown() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
    TEST_UTIL.cleanupDataTestDirOnTestFS(String.valueOf(testDir));
    if (zkCluster != null) {
      zkCluster.shutdown();
    }
  }
}
index 9b1d8524d003..da648e396603 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdmin.java @@ -959,6 +959,11 @@ public void flushMasterStore() throws IOException { admin.flushMasterStore(); } + @Override + public Map uncacheStaleBlocks(ServerName serverName) throws IOException { + return admin.uncacheStaleBlocks(serverName); + } + @Override public boolean replicationPeerModificationSwitch(boolean on, boolean drainProcedures) throws IOException { diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java index 1b7b6938524a..7c2792a4a879 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/client/ThriftAdmin.java @@ -1347,4 +1347,9 @@ public boolean isReplicationPeerModificationEnabled() throws IOException { throw new NotImplementedException( "isReplicationPeerModificationEnabled not supported in ThriftAdmin"); } + + @Override + public Map uncacheStaleBlocks(ServerName serverName) throws IOException { + throw new NotImplementedException("uncacheStaleBlocks not supported in ThriftAdmin"); + } }