Posted to commits@hbase.apache.org by zg...@apache.org on 2020/09/02 09:13:02 UTC

[hbase] branch branch-2 updated: HBASE-24831 Avoid invoke Counter using reflection in SnapshotInputFormat (#2209)

This is an automated email from the ASF dual-hosted git repository.

zghao pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new e042cab  HBASE-24831 Avoid invoke Counter using reflection in SnapshotInputFormat (#2209)
e042cab is described below

commit e042cabfb334c627d58f9b1c7e026b8fb5b61892
Author: Yechao Chen <ch...@gmail.com>
AuthorDate: Wed Sep 2 16:22:44 2020 +0800

    HBASE-24831 Avoid invoke Counter using reflection in SnapshotInputFormat (#2209)
    
    Signed-off-by: Duo Zhang <zh...@apache.org>
---
 .../hbase/mapreduce/TableRecordReaderImpl.java     | 66 ++++++++--------------
 .../hbase/mapreduce/TableSnapshotInputFormat.java  |  5 +-
 2 files changed, 24 insertions(+), 47 deletions(-)
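
For context, here is a minimal sketch of the pattern this commit removes versus the one it adopts. The class and method names below are invented for illustration and are not part of the commit; the only API assumed is the stock TaskAttemptContext#getCounter(String, String) call that the new code relies on.

    import java.io.IOException;
    import java.lang.reflect.Method;

    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    // Illustration only: contrast of the reflective lookup being removed with the
    // direct call the patch switches to.
    public class CounterLookupSketch {

      // Old pattern: probe for getCounter(String, String) via reflection, kept from the
      // days when the method might not exist on the running MapReduce version.
      static void incrementViaReflection(TaskAttemptContext context, String group,
          String name, long delta) throws IOException {
        try {
          Method getCounter =
            context.getClass().getMethod("getCounter", String.class, String.class);
          Counter counter = (Counter) getCounter.invoke(context, group, name);
          counter.increment(delta);
        } catch (Exception e) {
          throw new IOException("Failed to update counter " + group + ":" + name, e);
        }
      }

      // New pattern: the MapReduce API exposes the method directly, so the
      // reflective indirection is unnecessary.
      static void incrementDirectly(TaskAttemptContext context, String group,
          String name, long delta) {
        Counter counter = context.getCounter(group, name);
        if (counter != null) {
          counter.increment(delta);
        }
      }
    }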

diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
index 439a2a6..821df12 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -60,7 +59,6 @@ public class TableRecordReaderImpl {
   private ImmutableBytesWritable key = null;
   private Result value = null;
   private TaskAttemptContext context = null;
-  private Method getCounter = null;
   private long numRestarts = 0;
   private long numStale = 0;
   private long timestamp;
@@ -97,25 +95,6 @@ public class TableRecordReaderImpl {
   }
 
   /**
-   * In new mapreduce APIs, TaskAttemptContext has two getCounter methods
-   * Check if getCounter(String, String) method is available.
-   * @return The getCounter method or null if not available.
-   */
-  protected static Method retrieveGetCounterWithStringsParams(TaskAttemptContext context)
-      throws IOException {
-    Method m = null;
-    try {
-      m = context.getClass().getMethod("getCounter",
-        new Class [] {String.class, String.class});
-    } catch (SecurityException e) {
-      throw new IOException("Failed test for getCounter", e);
-    } catch (NoSuchMethodException e) {
-      // Ignore
-    }
-    return m;
-  }
-
-  /**
    * Sets the HBase table.
    *
    * @param htable  The {@link org.apache.hadoop.hbase.HTableDescriptor} to scan.
@@ -145,7 +124,6 @@ public class TableRecordReaderImpl {
       InterruptedException {
     if (context != null) {
       this.context = context;
-      getCounter = retrieveGetCounterWithStringsParams(context);
     }
     restart(scan.getStartRow());
   }
@@ -213,8 +191,7 @@ public class TableRecordReaderImpl {
           rowcount ++;
           if (rowcount >= logPerRowCount) {
             long now = System.currentTimeMillis();
-            LOG.info("Mapper took " + (now-timestamp)
-              + "ms to process " + rowcount + " rows");
+            LOG.info("Mapper took {}ms to process {} rows", (now - timestamp), rowcount);
             timestamp = now;
             rowcount = 0;
           }
@@ -266,8 +243,7 @@ public class TableRecordReaderImpl {
       updateCounters();
       if (logScannerActivity) {
         long now = System.currentTimeMillis();
-        LOG.info("Mapper took " + (now-timestamp)
-          + "ms to process " + rowcount + " rows");
+        LOG.info("Mapper took {}ms to process {} rows", (now - timestamp), rowcount);
         LOG.info(ioe.toString(), ioe);
         String lastRow = lastSuccessfulRow == null ?
           "null" : Bytes.toStringBinary(lastSuccessfulRow);
@@ -283,36 +259,40 @@ public class TableRecordReaderImpl {
    * If hbase runs on old version of mapreduce, it won't be able to get
    * access to counters and TableRecorderReader can't update counter values.
    */
-  private void updateCounters() throws IOException {
+  private void updateCounters() {
     ScanMetrics scanMetrics = scanner.getScanMetrics();
     if (scanMetrics == null) {
       return;
     }
 
-    updateCounters(scanMetrics, numRestarts, getCounter, context, numStale);
+    updateCounters(scanMetrics, numRestarts, context, numStale);
   }
 
   protected static void updateCounters(ScanMetrics scanMetrics, long numScannerRestarts,
-      Method getCounter, TaskAttemptContext context, long numStale) {
+      TaskAttemptContext context, long numStale) {
     // we can get access to counters only if hbase uses new mapreduce APIs
-    if (getCounter == null) {
+    if (context == null) {
       return;
     }
 
-    try {
-      for (Map.Entry<String, Long> entry:scanMetrics.getMetricsMap().entrySet()) {
-        Counter ct = (Counter)getCounter.invoke(context,
-            HBASE_COUNTER_GROUP_NAME, entry.getKey());
-
-        ct.increment(entry.getValue());
+      for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
+        Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, entry.getKey());
+        if (counter != null) {
+          counter.increment(entry.getValue());
+        }
+      }
+      if (numScannerRestarts != 0L) {
+        Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, "NUM_SCANNER_RESTARTS");
+        if (counter != null) {
+          counter.increment(numScannerRestarts);
+        }
+      }
+      if (numStale != 0L) {
+        Counter counter = context.getCounter(HBASE_COUNTER_GROUP_NAME, "NUM_SCAN_RESULTS_STALE");
+        if (counter != null) {
+          counter.increment(numStale);
+        }
       }
-      ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
-          "NUM_SCANNER_RESTARTS")).increment(numScannerRestarts);
-      ((Counter) getCounter.invoke(context, HBASE_COUNTER_GROUP_NAME,
-          "NUM_SCAN_RESULTS_STALE")).increment(numStale);
-    } catch (Exception e) {
-      LOG.debug("can't update counter." + StringUtils.stringifyException(e));
-    }
   }
 
   /**
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
index d90062f..4016808 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
-import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -149,13 +148,11 @@ public class TableSnapshotInputFormat extends InputFormat<ImmutableBytesWritable
     private TableSnapshotInputFormatImpl.RecordReader delegate =
       new TableSnapshotInputFormatImpl.RecordReader();
     private TaskAttemptContext context;
-    private Method getCounter;
 
     @Override
     public void initialize(InputSplit split, TaskAttemptContext context) throws IOException,
         InterruptedException {
       this.context = context;
-      getCounter = TableRecordReaderImpl.retrieveGetCounterWithStringsParams(context);
       delegate.initialize(
         ((TableSnapshotRegionSplit) split).delegate,
         context.getConfiguration());
@@ -167,7 +164,7 @@ public class TableSnapshotInputFormat extends InputFormat<ImmutableBytesWritable
       if (result) {
         ScanMetrics scanMetrics = delegate.getScanner().getScanMetrics();
         if (scanMetrics != null && context != null) {
-          TableRecordReaderImpl.updateCounters(scanMetrics, 0, getCounter, context, 0);
+          TableRecordReaderImpl.updateCounters(scanMetrics, 0, context, 0);
         }
       }
       return result;
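
As a usage note, a minimal, self-contained sketch of the counter-update pattern both record readers now share: iterate the ScanMetrics map and feed each value to TaskAttemptContext#getCounter(String, String). The class and method names are assumptions made up for this sketch, not code from the repository.

    import java.util.Map;

    import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    // Illustration only: publish scan metrics as MapReduce job counters without reflection.
    public class ScanMetricsToCounters {

      static void publish(ScanMetrics scanMetrics, TaskAttemptContext context, String group) {
        // Metrics or a task context may be absent, e.g. outside a real task attempt.
        if (scanMetrics == null || context == null) {
          return;
        }
        for (Map.Entry<String, Long> entry : scanMetrics.getMetricsMap().entrySet()) {
          Counter counter = context.getCounter(group, entry.getKey());
          if (counter != null) {
            counter.increment(entry.getValue());
          }
        }
      }
    }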