diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
index 78ae9a64f218..b3c581332c09 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
@@ -95,23 +95,32 @@
   private short put(byte[] array, int offset, int length) {
     byte[] stored = new byte[length];
     Bytes.putBytes(stored, 0, array, offset, length);
-    if (currSize < initSize) {
-      // There is space to add without evicting.
-      if (indexToNode[currSize] == null) {
-        indexToNode[currSize] = new Node();
-      }
-      indexToNode[currSize].setContents(stored, 0, stored.length);
-      setHead(indexToNode[currSize]);
-      short ret = (short) currSize++;
-      nodeToIndex.put(indexToNode[ret], ret);
-      return ret;
+    Node node = new Node();
+    node.setContents(stored, 0, stored.length);
+    if (nodeToIndex.containsKey(node)) {
+      short index = nodeToIndex.get(node);
+      node = indexToNode[index];
+      moveToHead(node);
+      return index;
     } else {
-      short s = nodeToIndex.remove(tail);
-      tail.setContents(stored, 0, stored.length);
-      // we need to rehash this.
-      nodeToIndex.put(tail, s);
-      moveToHead(tail);
-      return s;
+      if (currSize < initSize) {
+        // There is space to add without evicting.
+        if (indexToNode[currSize] == null) {
+          indexToNode[currSize] = new Node();
+        }
+        indexToNode[currSize].setContents(stored, 0, stored.length);
+        setHead(indexToNode[currSize]);
+        short ret = (short) currSize++;
+        nodeToIndex.put(indexToNode[ret], ret);
+        return ret;
+      } else {
+        short s = nodeToIndex.remove(tail);
+        tail.setContents(stored, 0, stored.length);
+        // we need to rehash this.
+        nodeToIndex.put(tail, s);
+        moveToHead(tail);
+        return s;
+      }
     }
   }
 
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
index f8d0c220e833..f3335d80b763 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java
@@ -63,12 +63,25 @@ public void testPassingEmptyArrayToFindEntry() {
   @Test
   public void testPassingSameArrayToAddEntry() {
     // Add random predefined byte array, in this case a random byte array from
-    // HConstants. Assert that when we add, we get new index. Thats how it
-    // works.
+    // HConstants.
+    // Assert that when we add the same array again, we get the old index,
+    // because we DO NOT need to write a new one.
     int len = HConstants.CATALOG_FAMILY.length;
     int index = testee.addEntry(HConstants.CATALOG_FAMILY, 0, len);
-    assertFalse(index == testee.addEntry(HConstants.CATALOG_FAMILY, 0, len));
-    assertFalse(index == testee.addEntry(HConstants.CATALOG_FAMILY, 0, len));
+    assertTrue(index == testee.addEntry(HConstants.CATALOG_FAMILY, 0, len));
+    assertTrue(index == testee.addEntry(HConstants.CATALOG_FAMILY, 0, len));
+  }
+
+  @Test
+  public void testPassingSameArrayToAddEntryThenEvict() {
+    testee.init(3);
+    byte[] byte0 = Bytes.toBytes(0);
+    byte[] byte1 = Bytes.toBytes(1);
+    assertEquals(0, testee.addEntry(byte0, 0, byte0.length));
+    assertEquals(0, testee.addEntry(byte0, 0, byte0.length));
+    assertEquals(0, testee.addEntry(byte0, 0, byte0.length));
+    assertEquals(1, testee.addEntry(byte1, 0, byte1.length));
+    assertEquals(0, testee.addEntry(byte0, 0, byte0.length));
   }
 
   @Test
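
The behavioral change is easier to see in isolation. Below is a minimal, hypothetical standalone sketch (not the HBase class itself) of the semantics the patched put() now provides: adding bytes that are already in the dictionary returns the existing index and bumps the entry to most-recently-used, rather than consuming a new slot. The class name DedupLruDictSketch is invented, and an access-order LinkedHashMap stands in for the real LRUDictionary's hand-rolled doubly linked list and Node-keyed HashMap.

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class DedupLruDictSketch {
  private final int capacity;
  private short nextIndex = 0;
  // accessOrder=true makes iteration order least-recently-used first,
  // so the eldest entry is always the eviction candidate.
  private final LinkedHashMap<ByteBuffer, Short> entries =
      new LinkedHashMap<>(16, 0.75f, true);

  public DedupLruDictSketch(int capacity) {
    this.capacity = capacity;
  }

  public short addEntry(byte[] data, int offset, int length) {
    // ByteBuffer gives content-based equals/hashCode for the lookup key.
    ByteBuffer key =
        ByteBuffer.wrap(Arrays.copyOfRange(data, offset, offset + length));
    Short existing = entries.get(key); // get() also refreshes recency
    if (existing != null) {
      return existing; // duplicate: hand back the old index, no new slot
    }
    short index;
    if (nextIndex < capacity) {
      index = nextIndex++; // free slot still available
    } else {
      // Full: evict the least recently used entry and recycle its index,
      // mirroring how the patch reuses the tail node's slot.
      Map.Entry<ByteBuffer, Short> lru = entries.entrySet().iterator().next();
      index = lru.getValue();
      entries.remove(lru.getKey());
    }
    entries.put(key, index);
    return index;
  }

  public static void main(String[] args) {
    DedupLruDictSketch dict = new DedupLruDictSketch(3);
    byte[] byte0 = {0};
    byte[] byte1 = {1};
    System.out.println(dict.addEntry(byte0, 0, 1)); // 0
    System.out.println(dict.addEntry(byte0, 0, 1)); // 0, old index reused
    System.out.println(dict.addEntry(byte1, 0, 1)); // 1, genuinely new entry
    System.out.println(dict.addEntry(byte0, 0, 1)); // 0, still resident
  }
}

This is the same add-duplicate-then-evict sequence the new testPassingSameArrayToAddEntryThenEvict test asserts against the real dictionary.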