You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ps...@apache.org on 2020/04/16 09:26:42 UTC

[hbase-operator-tools] branch master updated: HBASE-24039 HBCK2 feature negotiation to check what commands are supported (#55)

This is an automated email from the ASF dual-hosted git repository.

psomogyi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-operator-tools.git


The following commit(s) were added to refs/heads/master by this push:
     new 2c6adc5  HBASE-24039 HBCK2 feature negotiation to check what commands are supported (#55)
2c6adc5 is described below

commit 2c6adc5050f013e957d58e47f013e9c77f2e4e1e
Author: richardantal <60...@users.noreply.github.com>
AuthorDate: Thu Apr 16 11:26:35 2020 +0200

    HBASE-24039 HBCK2 feature negotiation to check what commands are supported (#55)
    
    Signed-off-by: Peter Somogyi <ps...@apache.org>
    Signed-off-by: Michael Stack <st...@apache.org>
---
 .../src/main/java/org/apache/hbase/HBCK2.java      | 40 +++++++++++++++++-----
 .../src/test/java/org/apache/hbase/TestHBCK2.java  | 14 ++++++++
 2 files changed, 46 insertions(+), 8 deletions(-)

diff --git a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
index ead69c4..46de2ec 100644
--- a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
+++ b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
@@ -21,8 +21,10 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintWriter;
 import java.io.StringWriter;
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
@@ -94,6 +96,15 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
   private static final String SET_REGION_STATE = "setRegionState";
   private static final String SCHEDULE_RECOVERIES = "scheduleRecoveries";
   private static final String FIX_META = "fixMeta";
+  // TODO update this map if the name of a method in the Hbck interface
+  //  (in the org.apache.hadoop.hbase.client package) changes, or if a new command is added
+  //  whose hbck command name does not equal the method name in the Hbck interface.
+  private static final Map<String, List<String> > FUNCTION_NAME_MAP =
+          Collections.unmodifiableMap(new HashMap<String, List<String>>() {{
+              put(SET_TABLE_STATE, Arrays.asList("setTableStateInMeta"));
+              put(BYPASS, Arrays.asList("bypassProcedure"));
+              put(SCHEDULE_RECOVERIES, Arrays.asList("scheduleServerCrashProcedure",
+                      "scheduleServerCrashProcedures")); }});
 
   private static final String ADD_MISSING_REGIONS_IN_META_FOR_TABLES =
     "addFsRegionsMissingInMeta";
@@ -133,6 +144,20 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     }
   }
 
+  void checkFunctionSupported(ClusterConnection connection, String cmd) throws IOException {
+    if (skipCheck) {
+      LOG.info("Skipped {} command version check; 'skip' set", cmd);
+      return;
+    }
+    List<Method> methods = Arrays.asList(connection.getHbck().getClass().getDeclaredMethods());
+    List<String> finalCmds = FUNCTION_NAME_MAP.getOrDefault(cmd, Collections.singletonList(cmd));
+    boolean supported = methods.stream().anyMatch(method ->  finalCmds.contains(method.getName()));
+    if (!supported) {
+      throw new UnsupportedOperationException("This HBase cluster does not support command: "
+              + cmd);
+    }
+  }
+
   TableState setTableState(Hbck hbck, TableName tableName, TableState.State state)
       throws IOException {
     return hbck.setTableStateInMeta(new TableState(tableName, state));
@@ -324,7 +349,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     boolean recursiveFlag = commandLine.hasOption(recursive.getOpt());
     List<Long> pids = Arrays.stream(pidStrs).map(Long::valueOf).collect(Collectors.toList());
     try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-      checkHBCKSupport(connection, BYPASS);
+      checkFunctionSupported(connection, BYPASS);
       return hbck.bypassProcedure(pids, lockWait, overrideFlag, recursiveFlag);
     }
   }
@@ -727,7 +752,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-          checkHBCKSupport(connection, command);
+          checkFunctionSupported(connection, command);
           System.out.println(setTableState(hbck, TableName.valueOf(commands[1]),
               TableState.State.valueOf(commands[2])));
         }
@@ -739,7 +764,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-          checkHBCKSupport(connection, command);
+          checkFunctionSupported(connection, command);
           System.out.println(assigns(hbck, purgeFirst(commands)));
         }
         break;
@@ -749,7 +774,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           showErrorMessage(command + " takes one or more pids");
           return EXIT_FAILURE;
         }
-        // bypass does the connection setup and the checkHBCKSupport down
+        // bypass does the connection setup and the checkFunctionSupported down
         // inside in the bypass method delaying connection setup until last
         // moment. It does this because it has another set of command options
         // to process and wants to do that before setting up connection.
@@ -768,7 +793,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-          checkHBCKSupport(connection, command);
+          checkFunctionSupported(connection, command);
           System.out.println(toString(unassigns(hbck, purgeFirst(commands))));
         }
         break;
@@ -813,7 +838,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-          checkHBCKSupport(connection, command, "2.0.3", "2.1.2", "2.2.0", "3.0.0");
+          checkFunctionSupported(connection, command);
           System.out.println(toString(scheduleRecoveries(hbck, purgeFirst(commands))));
         }
         break;
@@ -824,8 +849,7 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
-          checkHBCKSupport(connection, command, "2.0.6", "2.1.6", "2.2.1", "2.3.0",
-              "3.0.0");
+          checkFunctionSupported(connection, command);
           hbck.fixMeta();
           System.out.println("Server-side processing of fixMeta triggered.");
         }
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
index dd08b0f..51bc7bc 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
@@ -504,6 +504,20 @@ public class TestHBCK2 {
     assertFalse("Should not contain: " + expectedResult, result.contains(expectedResult));
   }
 
+  @Test
+  public void testFunctionSupported() throws IOException {
+    try (ClusterConnection connection = this.hbck2.connect()) {
+      this.hbck2.checkFunctionSupported(connection, "scheduleRecoveries");
+    }
+  }
+
+  @Test (expected = UnsupportedOperationException.class)
+  public void testFunctionNotSupported() throws IOException {
+    try (ClusterConnection connection = this.hbck2.connect()) {
+      this.hbck2.checkFunctionSupported(connection, "test");
+    }
+  }
+
   private String testFormatExtraRegionsInMetaReport() throws IOException {
     return testFormatExtraRegionsInMeta(new String[]{HBCK2.EXTRA_REGIONS_IN_META });
   }