diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index 6959edd367c4..c4638cc20b1c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -255,7 +255,7 @@ public byte[] encodeData() {
       }
       BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
       baos.writeTo(stream);
-      this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.getOurBytes());
+      this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.toByteArray());
     } catch (IOException e) {
       throw new RuntimeException(String.format(
           "Bug in encoding part of algorithm %s. " +
@@ -268,14 +268,15 @@ public byte[] encodeData() {
   private static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream {
     private byte[] ourBytes;
 
-    private synchronized byte[] getOurBytes() {
-      return ourBytes;
-    }
-
     @Override
     public synchronized void write(byte[] b, int off, int len) {
       this.ourBytes = b;
     }
+
+    @Override
+    public synchronized byte[] toByteArray() {
+      return ourBytes;
+    }
   }
 
   @Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index f9d92aba9c32..503cc6bf192c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -3078,7 +3078,7 @@ public List<String> listNamespaces() throws IOException {
       }
     }
     if (cpHost != null) {
-      bypass = cpHost.postListNamespaces(namespaces);
+      cpHost.postListNamespaces(namespaces);
     }
     return namespaces;
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
index f48533ddf43e..9c9b4d0566c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java
@@ -440,9 +440,10 @@ private void applyThrottle(final Quotas.Builder quotas, final ThrottleRequest req)
       case READ_SIZE:
         if (req.hasTimedQuota()) {
           throttle.setReadSize(req.getTimedQuota());
-        } else {
-          throttle.clearReadSize();
-        }
+        } else {
+          throttle.clearReadSize();
+        }
+        break;
       case REQUEST_CAPACITY_UNIT:
         if (req.hasTimedQuota()) {
           throttle.setReqCapacityUnit(req.getTimedQuota());
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index aa0375b8460d..b4b7c5f45265 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -7959,7 +7959,9 @@ public Result append(Append mutate, long nonceGroup, long nonce) throws IOException {
       for (Map.Entry<Store, List<Cell>> entry: removedCellsForMemStore.entrySet()) {
         entry.getKey().add(entry.getValue());
       }
-      if (we != null) mvcc.complete(we);
+      if (we != null) {
+        mvcc.complete(we);
+      }
     } else if (we != null) {
       mvcc.completeAndWait(we);
     }
@@ -8180,6 +8182,7 @@ private Result doIncrement(Increment increment, long nonceGroup, long nonce) throws IOException {
       rowLock.release();
     }
     // if the wal sync was unsuccessful, remove keys from memstore
+    WriteEntry we = walKey != null ? walKey.getWriteEntry() : null;
     if (doRollBackMemstore) {
       for (Map.Entry<Store, List<Cell>> entry: forMemStore.entrySet()) {
         rollbackMemstore(entry.getKey(), entry.getValue());
@@ -8187,9 +8190,13 @@ private Result doIncrement(Increment increment, long nonceGroup, long nonce) throws IOException {
       for (Map.Entry<Store, List<Cell>> entry: removedCellsForMemStore.entrySet()) {
         entry.getKey().add(entry.getValue());
       }
-      if (walKey != null) mvcc.complete(walKey.getWriteEntry());
+      if (we != null) {
+        mvcc.complete(we);
+      }
     } else {
-      if (walKey != null) mvcc.completeAndWait(walKey.getWriteEntry());
+      if (we != null) {
+        mvcc.completeAndWait(we);
+      }
     }
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index fb4ff9a9e061..41e891863bdc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -2796,10 +2796,14 @@ private void removeCompactedfiles(Collection<StoreFile> compactedfiles)
             // Just close and return
             filesToRemove.add(file);
           } else {
-            LOG.info("Can't archive compacted file " + file.getPath()
+            if (r != null) {
+              LOG.info("Can't archive compacted file " + file.getPath()
                 + " because of either isCompactedAway=" + r.isCompactedAway()
                 + " or file has reference, isReferencedInReads=" + r.isReferencedInReads()
                 + ", refCount=" + r.getRefCount() + ", skipping for now.");
+            } else {
+              LOG.info("Can't archive compacted file " + file.getPath() + ", skipping for now.");
+            }
           }
         } catch (Exception e) {
           LOG.error(
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/compaction/MajorCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/compaction/MajorCompactor.java
index 96faee8c2402..b2918e80dd0e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/compaction/MajorCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/compaction/MajorCompactor.java
@@ -69,7 +69,7 @@ public class MajorCompactor extends Configured implements Tool {
 
   private static final Logger LOG = LoggerFactory.getLogger(MajorCompactor.class);
 
-  protected static final Set<String> ERRORS = Sets.newHashSet();
+  static final Set<String> ERRORS = Sets.newHashSet();
   protected ClusterCompactionQueues clusterCompactionQueues;
 
   private long timestamp;