diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java index 21589fd0e0d0..5242bd94c8cc 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; -import java.lang.reflect.Method; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; @@ -61,7 +60,6 @@ public class TableRecordReaderImpl { private ImmutableBytesWritable key = null; private Result value = null; private TaskAttemptContext context = null; - private Method getCounter = null; private long numRestarts = 0; private long numStale = 0; private long timestamp; @@ -97,25 +95,6 @@ public void restart(byte[] firstRow) throws IOException { } } - /** - * In new mapreduce APIs, TaskAttemptContext has two getCounter methods - * Check if getCounter(String, String) method is available. - * @return The getCounter method or null if not available. - */ - protected static Method retrieveGetCounterWithStringsParams(TaskAttemptContext context) - throws IOException { - Method m = null; - try { - m = context.getClass().getMethod("getCounter", - new Class [] {String.class, String.class}); - } catch (SecurityException e) { - throw new IOException("Failed test for getCounter", e); - } catch (NoSuchMethodException e) { - // Ignore - } - return m; - } - /** * Sets the HBase table. * @param htable The table to scan. 
@@ -144,7 +123,6 @@ public void initialize(InputSplit inputsplit, InterruptedException { if (context != null) { this.context = context; - getCounter = retrieveGetCounterWithStringsParams(context); } restart(scan.getStartRow()); } @@ -212,8 +190,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException { rowcount ++; if (rowcount >= logPerRowCount) { long now = System.currentTimeMillis(); - LOG.info("Mapper took " + (now-timestamp) - + "ms to process " + rowcount + " rows"); + LOG.info("Mapper took {}ms to process {} rows", (now - timestamp), rowcount); timestamp = now; rowcount = 0; } @@ -265,8 +242,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException { updateCounters(); if (logScannerActivity) { long now = System.currentTimeMillis(); - LOG.info("Mapper took " + (now-timestamp) - + "ms to process " + rowcount + " rows"); + LOG.info("Mapper took {}ms to process {} rows", (now - timestamp), rowcount); LOG.info(ioe.toString(), ioe); String lastRow = lastSuccessfulRow == null ? "null" : Bytes.toStringBinary(lastSuccessfulRow); @@ -282,36 +258,40 @@ public boolean nextKeyValue() throws IOException, InterruptedException { * If hbase runs on old version of mapreduce, it won't be able to get * access to counters and TableRecorderReader can't update counter values. 
*/ - private void updateCounters() throws IOException { + private void updateCounters() { ScanMetrics scanMetrics = scanner.getScanMetrics(); if (scanMetrics == null) { return; } - updateCounters(scanMetrics, numRestarts, getCounter, context, numStale); + updateCounters(scanMetrics, numRestarts, context, numStale); } protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts, - Method getCounter, TaskAttemptContext context, long numStale) { + TaskAttemptContext context, long numStale) { // we can get access to counters only if hbase uses new mapreduce APIs - if (getCounter == null) { + if (context == null) { return; } - try { - for (Map.Entry<String, Long> entry:scanMetrics.getMetricsMap().entrySet()) { - Counter ct = (Counter)getCounter.invoke(context, - HBASE_COUNTER_GROUP_NAME, entry.getKey()); - - ct.increment(entry.getValue()); + for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) { + Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, entry.getKey()); + if (counter != null) { + counter.increment(entry.getValue()); + } + } + if (numScannerRestarts != 0L) { + Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, "NUM_SCANNER_RESTARTS"); + if (counter != null) { + counter.increment(numScannerRestarts); + } + } + if (numStale != 0L) { + Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, "NUM_SCAN_RESULTS_STALE"); + if (counter != null) { + counter.increment(numStale); + } + } - ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME, - "NUM_SCANNER_RESTARTS")).increment(numScannerRestarts); - ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME, - "NUM_SCAN_RESULTS_STALE")).increment(numStale); - } catch (Exception e) { - LOG.debug("can't update counter."
+ StringUtils.stringifyException(e)); - } } /** diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java index 2fbbb5180372..3ca6c0323688 100644 --- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java +++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java @@ -21,7 +21,6 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; -import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.fs.Path; @@ -138,13 +137,11 @@ static class TableSnapshotRegionRecordReader extends private TableSnapshotInputFormatImpl.RecordReader delegate = new TableSnapshotInputFormatImpl.RecordReader(); private TaskAttemptContext context; - private Method getCounter; @Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { this.context = context; - getCounter = TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context); delegate.initialize( ((TableSnapshotRegionSplit) split).delegate, context.getConfiguration()); @@ -156,7 +153,7 @@ public boolean nextKeyValue() throws IOException, InterruptedException { if (result) { ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics(); if (scanMetrics != null && context != null) { - TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0); + TableRecordReaderImpl.updateCounters(scanMetrics, 0, context, 0); } } return result;