You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by nd...@apache.org on 2022/04/22 14:02:45 UTC

[hbase-operator-tools] branch master updated: HBASE-24587 hbck2 command should accept one or more files containing a list of region names/table names/namespaces (#105)

This is an automated email from the ASF dual-hosted git repository.

ndimiduk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-operator-tools.git


The following commit(s) were added to refs/heads/master by this push:
     new 6f0796f  HBASE-24587 hbck2 command should accept one or more files containing a list of region names/table names/namespaces (#105)
6f0796f is described below

commit 6f0796f09a583a6dd3063b5d915b0710da9a0314
Author: clarax <cl...@gmail.com>
AuthorDate: Fri Apr 22 07:02:41 2022 -0700

    HBASE-24587 hbck2 command should accept one or more files containing a list of region names/table names/namespaces (#105)
    
    Signed-off-by: Nick Dimiduk <nd...@apache.org>
    Reviewed-by:  subrat-mishra  <su...@gmail.com>
---
 hbase-hbck2/README.md                              |  69 +++-
 .../src/main/java/org/apache/hbase/HBCK2.java      | 372 +++++++++++++++------
 .../src/test/java/org/apache/hbase/TestHBCK2.java  | 292 +++++++++++-----
 .../apache/hbase/TestHBCKCommandLineParsing.java   |  44 ++-
 4 files changed, 575 insertions(+), 202 deletions(-)

diff --git a/hbase-hbck2/README.md b/hbase-hbck2/README.md
index c8ebb79..14ad004 100644
--- a/hbase-hbck2/README.md
+++ b/hbase-hbck2/README.md
@@ -100,9 +100,10 @@ Options:
  -z,--zookeeper.znode.parent <arg>                parent znode of hbase
                                                   ensemble
 Command:
- addFsRegionsMissingInMeta <NAMESPACE|NAMESPACE:TABLENAME>...
+ addFsRegionsMissingInMeta <NAMESPACE|NAMESPACE:TABLENAME>...|-i <INPUT_FILE>...
    Options:
     -d,--force_disable aborts fix for table if disable fails.
+    -i,--inputFiles  take one or more input files of namespace or table names
    To be used when regions missing from hbase:meta but directories
    are present still in HDFS. Can happen if user has run _hbck1_
    'OfflineMetaRepair' against an hbase-2.x cluster. Needs hbase:meta
@@ -121,6 +122,10 @@ Command:
    namespace 'n2':
      $ HBCK2 addFsRegionsMissingInMeta default:tbl_1 n1:tbl_2 n2
    Returns HBCK2  an 'assigns' command with all re-inserted regions.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line.
+    For example:
+     $ HBCK2 addFsRegionsMissingInMeta -i fileName1 fileName2
    SEE ALSO: reportMissingRegionsInMeta
    SEE ALSO: fixMeta
 
@@ -138,11 +143,13 @@ Command:
    If -i or --inputFiles is specified, pass one or more input file names.
    Each file contains encoded region names, one per line. For example:
      $ HBCK2 assigns -i fileName1 fileName2
+
  bypass [OPTIONS] <PID>...
    Options:
     -o,--override   override if procedure is running/stuck
     -r,--recursive  bypass parent and its children. SLOW! EXPENSIVE!
     -w,--lockWait   milliseconds to wait before giving up; default=1
+    -i,--inputFiles  take one or more input files of PIDs
    Pass one (or more) procedure 'pid's to skip to procedure finish. Parent
    of bypassed procedure will also be skipped to the finish. Entities will
    be left in an inconsistent state and will require manual fixup. May
@@ -150,10 +157,14 @@ Command:
    procedure has children. Add 'recursive' if all you have is a parent pid
    to finish parent and children. This is SLOW, and dangerous so use
    selectively. Does not always work.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains PID's, one per line. For example:
+     $ HBCK2 bypass -i fileName1 fileName2
 
- extraRegionsInMeta <NAMESPACE|NAMESPACE:TABLENAME>...
+ extraRegionsInMeta <NAMESPACE|NAMESPACE:TABLENAME>...|-i <INPUT_FILE>...
    Options:
     -f, --fix    fix meta by removing all extra regions found.
+    -i,--inputFiles  take one or more input files of namespace or table names
    Reports regions present on hbase:meta, but with no related
    directories on the file system. Needs hbase:meta to be online.
    For each table name passed as parameter, performs diff
@@ -173,16 +184,23 @@ Command:
      $ HBCK2 extraRegionsInMeta default:table_1 ns1
    Returns list of extra regions for each table passed as parameter, or
    for each table on namespaces specified as parameter.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line. For example:
+     $ HBCK2 extraRegionsInMeta -i fileName1 fileName2
 
- filesystem [OPTIONS] [<TABLENAME>...]
+ filesystem [OPTIONS] [<TABLENAME>...|-i <INPUT_FILE>...]
    Options:
     -f, --fix    sideline corrupt hfiles, bad links, and references.
+    -i,--inputFiles  take one or more input files of table names
    Report on corrupt hfiles, references, broken links, and integrity.
    Pass '--fix' to sideline corrupt files and links. '--fix' does NOT
    fix integrity issues; i.e. 'holes' or 'orphan' regions. Pass one or
    more tablenames to narrow checkup. Default checks all tables and
    restores 'hbase.version' if missing. Interacts with the filesystem
    only! Modified regions need to be reopened to pick-up changes.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <TABLENAME>, one per line. For example:
+     $ HBCK2 filesystem -i fileName1 fileName2
 
  fixMeta
    Do a server-side fix of bad or inconsistent state in hbase:meta.
@@ -220,14 +238,20 @@ Command:
    ServerCrashProcedures to stuck, you might need to fix these still
    after you generated the missing table info files.
 
- replication [OPTIONS] [<TABLENAME>...]
+ replication [OPTIONS] [<TABLENAME>...|-i <INPUT_FILE>...]
    Options:
     -f, --fix    fix any replication issues found.
+    -i,--inputFiles  take one or more input files of table names
    Looks for undeleted replication queues and deletes them if passed the
    '--fix' option. Pass a table name to check for replication barrier and
    purge if '--fix'.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <TABLENAME>, one per line. For example:
+     $ HBCK2 replication -i fileName1 fileName2
 
- reportMissingRegionsInMeta <NAMESPACE|NAMESPACE:TABLENAME>...
+ reportMissingRegionsInMeta <NAMESPACE|NAMESPACE:TABLENAME>...|-i <INPUT_FILE>...
+   Options:
+    -i,--inputFiles  take one or more input files of namespace or table names
    To be used when regions missing from hbase:meta but directories
    are present still in HDFS. Can happen if user has run _hbck1_
    'OfflineMetaRepair' against an hbase-2.x cluster. This is a CHECK only
@@ -252,9 +276,14 @@ Command:
      $ HBCK2 reportMissingRegionsInMeta default:table_1 ns1
    Returns list of missing regions for each table passed as parameter, or
    for each table on namespaces specified as parameter.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line. For example:
+     $ HBCK2 reportMissingRegionsInMeta -i fileName1 fileName2
 
- setRegionState <ENCODED_REGIONNAME> <STATE>
-   Possible region states:
+ setRegionState [<ENCODED_REGIONNAME> <STATE>|-i <INPUT_FILE>...]
+    Options:
+     -i,--inputFiles  take one or more input files of encoded region names and states
+  Possible region states:
     OFFLINE, OPENING, OPEN, CLOSING, CLOSED, SPLITTING, SPLIT,
     FAILED_OPEN, FAILED_CLOSE, MERGING, MERGED, SPLITTING_NEW,
     MERGING_NEW, ABNORMALLY_CLOSED
@@ -270,8 +299,14 @@ Command:
    setting region 'de00010733901a05f5a2a3a382e27dd4' to CLOSING:
      $ HBCK2 setRegionState de00010733901a05f5a2a3a382e27dd4 CLOSING
    Returns "0" if region state changed and "1" otherwise.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <ENCODED_REGIONNAME> <STATE>, one pair per line.
+   For example:
+     $ HBCK2 setRegionState -i fileName1 fileName2
 
- setTableState <TABLENAME> <STATE>
+ setTableState [<TABLENAME> <STATE>|-i <INPUT_FILE>...]
+   Options:
+     -i,--inputFiles  take one or more input files of table names and states
    Possible table states: ENABLED, DISABLED, DISABLING, ENABLING
    To read current table state, in the hbase shell run:
      hbase> get 'hbase:meta', '<TABLENAME>', 'table:state'
@@ -280,8 +315,14 @@ Command:
    An example making table name 'user' ENABLED:
      $ HBCK2 setTableState users ENABLED
    Returns whatever the previous table state was.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <TABLENAME> <STATE>, one pair per line.
+   For example:
+     $ HBCK2 setTableState -i fileName1 fileName2
 
- scheduleRecoveries <SERVERNAME>...
+ scheduleRecoveries <SERVERNAME>...|-i <INPUT_FILE>...
+  Options:
+     -i,--inputFiles  take one or more input files of server names
    Schedule ServerCrashProcedure(SCP) for list of RegionServers. Format
    server name as '<HOSTNAME>,<PORT>,<STARTCODE>' (See HBase UI/logs).
    Example using RegionServer 'a.example.org,29100,1540348649479':
@@ -289,10 +330,14 @@ Command:
    Returns the pid(s) of the created ServerCrashProcedure(s) or -1 if
    no procedure created (see master logs for why not).
    Command support added in hbase versions 2.0.3, 2.1.2, 2.2.0 or newer.
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains <SERVERNAME>, one per line. For example:
+     $ HBCK2 scheduleRecoveries -i fileName1 fileName2
 
- unassigns <ENCODED_REGIONNAME>...
+ unassigns <ENCODED_REGIONNAME>...|-i <INPUT_FILE>...
    Options:
     -o,--override  override ownership by another procedure
+     -i,--inputFiles  take one or more input files of encoded names
    A 'raw' unassign that can be used even during Master initialization
    (if the -skip flag is specified). Skirts Coprocessors. Pass one or
    more encoded region names. 1588230740 is the hard-coded name for the
@@ -300,7 +345,9 @@ Command:
    of what a userspace encoded region name looks like. For example:
      $ HBCK2 unassign 1588230740 de00010733901a05f5a2a3a382e27dd4
    Returns the pid(s) of the created UnassignProcedure(s) or -1 if none.
-
+   If -i or --inputFiles is specified, pass one or more input file names.
+   Each file contains encoded region names, one per line. For example:
+     $ HBCK2 unassigns -i fileName1 fileName2
    SEE ALSO, org.apache.hbase.hbck1.OfflineMetaRepair, the offline
    hbase:meta tool. See the HBCK2 README for how to use.
 ```
diff --git a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
index 84dc834..a0afc6c 100644
--- a/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
+++ b/hbase-hbck2/src/main/java/org/apache/hbase/HBCK2.java
@@ -185,6 +185,37 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     }
   }
 
  /**
   * Dispatches the setTableState command, accepting either direct
   * "&lt;TABLENAME&gt; &lt;STATE&gt;" arguments or, with -i/--inputFiles, one or more
   * files whose lines each carry a table-name/state pair.
   * Prints the previous TableState (via setTableStateByArgs) for each pair processed.
   */
  void setTableState(Hbck hbck, String[] args) throws IOException {
    Options options = new Options();
    // -i/--inputFiles: treat the remaining args as file names instead of a name/state pair.
    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
    options.addOption(inputFile);
    CommandLine commandLine = getCommandLine(args, options);
    if (commandLine == null) {
      // getCommandLine already reported the parse failure; nothing more to do.
      return;
    }
    String[] argList = commandLine.getArgs();
    if(!commandLine.hasOption(inputFile.getOpt())) {
      // Direct mode: argList is expected to be {tableName, state}.
      System.out.println(setTableStateByArgs(hbck, argList));
    } else {
      // File mode: read every line from every listed file (second arg 'true'
      // tells getFromArgsOrFiles to treat argList entries as file paths).
      List<String> inputList = getFromArgsOrFiles(stringArrayToList(argList), true);
      for (String line : inputList) {
        // Each line holds a whitespace-separated "<TABLENAME> <STATE>" pair.
        String[] params = line.split("\\s+");
        System.out.println(setTableStateByArgs(hbck, params));
      }
    }
  }
+
  /**
   * Validates a {tableName, state} argument pair and applies it via
   * {@link #setTableState(Hbck, TableName, TableState.State)}.
   *
   * @return the previous TableState, or null when fewer than two args were supplied
   *         (an error message is printed in that case).
   *         NOTE(review): an invalid state string will throw IllegalArgumentException
   *         from TableState.State.valueOf rather than return null — confirm intended.
   */
  TableState setTableStateByArgs(Hbck hbck, String[] args) throws IOException {
    if (args == null || args.length < 2) {
      showErrorMessage(SET_TABLE_STATE +
              " takes tablename and state arguments: e.g. user ENABLED, you entered: " +
              Arrays.toString(args));
      return null;
    }
    return setTableState(hbck, TableName.valueOf(args[0]),
            TableState.State.valueOf(args[1]));
  }
+
   TableState setTableState(Hbck hbck, TableName tableName, TableState.State state)
       throws IOException {
     return hbck.setTableStateInMeta(new TableState(tableName, state));
@@ -196,6 +227,43 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return setRegionState(connection, region, 0, newState);
   }
 
  /**
   * Dispatches the setRegionState command, accepting either direct
   * "&lt;ENCODED_REGIONNAME&gt; &lt;STATE&gt;" arguments or, with -i/--inputFiles,
   * one or more files whose lines each carry a region/state entry.
   *
   * @return EXIT_SUCCESS or EXIT_FAILURE. In file mode, per-line failures are
   *         reported but the method still returns EXIT_SUCCESS after processing
   *         all lines — NOTE(review): confirm that is the intended contract.
   */
  int setRegionState(ClusterConnection connection, String[] args) throws IOException {
    Options options = new Options();
    // -i/--inputFiles: treat the remaining args as file names instead of a region/state pair.
    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
    options.addOption(inputFile);
    CommandLine commandLine = getCommandLine(args, options);
    if (commandLine == null) {
      // Parse failure already reported by getCommandLine.
      return EXIT_FAILURE;
    }
    String[] argList = commandLine.getArgs();
    if (argList == null) {
      return EXIT_FAILURE;
    }

    if(!commandLine.hasOption(inputFile.getOpt())) {
      // Direct mode: normalize args (e.g. split "<region>,<replicaId>") then apply once.
      String[] params = formatSetRegionStateCommand(argList);
      return setRegionStateByArgs(connection, params);
    } else {
      // File mode: read every line from every listed file and apply each entry.
      List<String> inputList = getFromArgsOrFiles(stringArrayToList(argList), true);
      for (String line : inputList) {
        String[] params = formatSetRegionStateCommand(line.split("\\s+"));
        if (setRegionStateByArgs(connection, params) == EXIT_FAILURE) {
          // NOTE(review): 'args' here is the original command line (file names),
          // not the failing line — the message does not identify which entry failed.
          showErrorMessage("setRegionState failed to set " + Arrays.toString(args));
        }
      }
      return EXIT_SUCCESS;
    }
  }
+
  /**
   * Applies one region-state change from a pre-formatted argument triple:
   * args[0]=encoded region name, args[1]=replica id, args[2]=state name.
   *
   * @return EXIT_FAILURE when fewer than three args are supplied; otherwise the
   *         result of the underlying setRegionState call.
   *         NOTE(review): malformed replica ids / state names throw
   *         NumberFormatException / IllegalArgumentException here — confirm callers
   *         expect that rather than EXIT_FAILURE.
   */
  int setRegionStateByArgs(ClusterConnection connection, String[] args) throws IOException {
    if (args == null || args.length < 3) {
      return EXIT_FAILURE;
    }
    RegionState.State state = RegionState.State.valueOf(args[2]);
    int replicaId = Integer.parseInt(args[1]);
    return setRegionState(connection, args[0], replicaId, state);
  }
+
   int setRegionState(ClusterConnection connection, String region, int replicaId,
                      RegionState.State newState)
           throws IOException {
@@ -238,13 +306,13 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return EXIT_FAILURE;
   }
 
-  Map<TableName,List<Path>> reportTablesWithMissingRegionsInMeta(String... nameSpaceOrTable)
+  Map<TableName,List<Path>> reportTablesWithMissingRegionsInMeta(String... args)
       throws IOException {
     Map<TableName,List<Path>> report;
     try (final FsRegionsMetaRecoverer fsRegionsMetaRecoverer =
         new FsRegionsMetaRecoverer(this.conf)) {
       report = fsRegionsMetaRecoverer.reportTablesMissingRegions(
-        formatNameSpaceTableParam(nameSpaceOrTable));
+        (getInputList(args)));
     } catch (IOException e) {
       LOG.error("Error reporting missing regions: ", e);
       throw e;
@@ -260,15 +328,21 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     Options options = new Options();
     Option fixOption = Option.builder("f").longOpt("fix").build();
     options.addOption(fixOption);
+    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
+    options.addOption(inputFile);
+    Map<TableName, List<String>> result = new HashMap<>();
     // Parse command-line.
-    CommandLineParser parser = new DefaultParser();
-    CommandLine commandLine;
-    commandLine = parser.parse(options, args, false);
+    CommandLine commandLine = getCommandLine(args, options);
+    if (commandLine == null) {
+      return result;
+    }
     boolean fix = commandLine.hasOption(fixOption.getOpt());
-    Map<TableName, List<String>> result = new HashMap<>();
+    boolean inputFileFlag = commandLine.hasOption(inputFile.getOpt());
+
     try (final FsRegionsMetaRecoverer fsRegionsMetaRecoverer =
       new FsRegionsMetaRecoverer(this.conf)) {
-      List<String> namespacesTables = formatNameSpaceTableParam(commandLine.getArgs());
+      List<String> namespacesTables =
+              getFromArgsOrFiles(commandLine.getArgList(), inputFileFlag);
       Map<TableName, List<RegionInfo>> reportMap =
         fsRegionsMetaRecoverer.reportTablesExtraRegions(namespacesTables);
       final List<String> toFix = new ArrayList<>();
@@ -305,16 +379,12 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return result;
   }
 
-  private List<String> formatNameSpaceTableParam(String... nameSpaceOrTable) {
-    return nameSpaceOrTable != null ? Arrays.asList(nameSpaceOrTable) : null;
-  }
-
   List<Future<List<String>>> addMissingRegionsInMetaForTables(String...
       nameSpaceOrTable) throws IOException {
     try (final FsRegionsMetaRecoverer fsRegionsMetaRecoverer =
       new FsRegionsMetaRecoverer(this.conf)) {
       return fsRegionsMetaRecoverer.addMissingRegionsInMetaForTables(
-        formatNameSpaceTableParam(nameSpaceOrTable));
+        getInputList(nameSpaceOrTable));
     } catch (IOException e) {
       LOG.error("Error adding missing regions: ", e);
       throw e;
@@ -328,53 +398,37 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     options.addOption(override);
     options.addOption(inputFile);
     // Parse command-line.
-    CommandLineParser parser = new DefaultParser();
-    CommandLine commandLine;
-    try {
-      commandLine = parser.parse(options, args, false);
-    } catch (ParseException e) {
-      showErrorMessage(e.getMessage());
+    CommandLine commandLine = getCommandLine(args, options);
+    if (commandLine == null) {
       return null;
     }
     boolean overrideFlag = commandLine.hasOption(override.getOpt());
-
+    boolean inputFileFlag = commandLine.hasOption(inputFile.getOpt());
     List<String> argList = commandLine.getArgList();
-    if (!commandLine.hasOption(inputFile.getOpt())) {
-      return hbck.assigns(argList, overrideFlag);
-    }
-    List<String> assignmentList = new ArrayList<>();
-    for (String filePath : argList) {
-      try (InputStream fileStream = new FileInputStream(filePath)){
-        LineIterator it = IOUtils.lineIterator(fileStream, "UTF-8");
-        while (it.hasNext()) {
-          assignmentList.add(it.nextLine().trim());
-        }
-      }
-    }
-    return hbck.assigns(assignmentList, overrideFlag);
+    return hbck.assigns(getFromArgsOrFiles(argList, inputFileFlag),overrideFlag);
   }
 
   List<Long> unassigns(Hbck hbck, String [] args) throws IOException {
     Options options = new Options();
     Option override = Option.builder("o").longOpt("override").build();
+    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
     options.addOption(override);
+    options.addOption(inputFile);
     // Parse command-line.
-    CommandLineParser parser = new DefaultParser();
-    CommandLine commandLine;
-    try {
-      commandLine = parser.parse(options, args, false);
-    } catch (ParseException e) {
-      showErrorMessage(e.getMessage());
+    CommandLine commandLine = getCommandLine(args, options);
+    if (commandLine == null) {
       return null;
     }
     boolean overrideFlag = commandLine.hasOption(override.getOpt());
-    return hbck.unassigns(commandLine.getArgList(), overrideFlag);
+    boolean inputFileFlag = commandLine.hasOption(inputFile.getOpt());
+    List<String> argList = commandLine.getArgList();
+    return hbck.unassigns(getFromArgsOrFiles(argList, inputFileFlag), overrideFlag);
   }
 
   /**
    * @return List of results OR null if failed to run.
    */
-  private List<Boolean> bypass(String[] args) throws IOException {
+  List<Boolean> bypass(String[] args) throws IOException {
     // Bypass has two options....
     Options options = new Options();
     // See usage for 'help' on these options.
@@ -384,27 +438,29 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     options.addOption(recursive);
     Option wait = Option.builder("w").longOpt("lockWait").hasArg().type(Integer.class).build();
     options.addOption(wait);
+    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
+    options.addOption(inputFile);
     // Parse command-line.
-    CommandLineParser parser = new DefaultParser();
-    CommandLine commandLine;
-    try {
-      commandLine = parser.parse(options, args, false);
-    } catch (ParseException e) {
-      showErrorMessage(e.getMessage());
+    CommandLine commandLine = getCommandLine(args, options);
+    if (commandLine == null) {
       return null;
     }
     long lockWait = DEFAULT_LOCK_WAIT;
     if (commandLine.hasOption(wait.getOpt())) {
       lockWait = Integer.parseInt(commandLine.getOptionValue(wait.getOpt()));
     }
-    String[] pidStrs = commandLine.getArgs();
+    boolean overrideFlag = commandLine.hasOption(override.getOpt());
+    boolean recursiveFlag = commandLine.hasOption(recursive.getOpt());
+    boolean inputFileFlag = commandLine.hasOption(inputFile.getOpt());
+
+    String[] pidStrs = getFromArgsOrFiles(commandLine.getArgList(), inputFileFlag)
+            .toArray(new String[0]);
     if (pidStrs == null || pidStrs.length <= 0) {
       showErrorMessage("No pids supplied.");
       return null;
     }
-    boolean overrideFlag = commandLine.hasOption(override.getOpt());
-    boolean recursiveFlag = commandLine.hasOption(recursive.getOpt());
     List<Long> pids = Arrays.stream(pidStrs).map(Long::valueOf).collect(Collectors.toList());
+
     try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
       checkFunctionSupported(connection, BYPASS);
       return hbck.bypassProcedure(pids, lockWait, overrideFlag, recursiveFlag);
@@ -413,8 +469,11 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
 
   List<Long> scheduleRecoveries(Hbck hbck, String[] args) throws IOException {
     List<HBaseProtos.ServerName> serverNames = new ArrayList<>();
-    for (String serverName: args) {
-      serverNames.add(parseServerName(serverName));
+    List<String> inputList = getInputList(args);
+    if (inputList != null) {
+      for (String serverName : inputList) {
+        serverNames.add(parseServerName(serverName));
+      }
     }
     return hbck.scheduleServerCrashProcedure(serverNames);
   }
@@ -497,10 +556,11 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
   }
 
   private static void usageAddFsRegionsMissingInMeta(PrintWriter writer) {
-    writer.println(" " + ADD_MISSING_REGIONS_IN_META_FOR_TABLES + " <NAMESPACE|"
-        + "NAMESPACE:TABLENAME>...");
+    writer.println(" " + ADD_MISSING_REGIONS_IN_META_FOR_TABLES + " [<NAMESPACE|"
+        + "NAMESPACE:TABLENAME>...|-i <INPUTFILES>...]");
     writer.println("   Options:");
     writer.println("    -d,--force_disable aborts fix for table if disable fails.");
+    writer.println("    -i,--inputFiles  take one or more files of namespace or table names");
     writer.println("   To be used when regions missing from hbase:meta but directories");
     writer.println("   are present still in HDFS. Can happen if user has run _hbck1_");
     writer.println("   'OfflineMetaRepair' against an hbase-2.x cluster. Needs hbase:meta");
@@ -522,10 +582,15 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("   Returns HBCK2  an 'assigns' command with all re-inserted regions.");
     writer.println("   SEE ALSO: " + REPORT_MISSING_REGIONS_IN_META);
     writer.println("   SEE ALSO: " + FIX_META);
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line.");
+    writer.println("   For example:");
+    writer.println("     $ HBCK2 " + ADD_MISSING_REGIONS_IN_META_FOR_TABLES +
+            " -i fileName1 fileName2");
   }
 
   private static void usageAssigns(PrintWriter writer) {
-    writer.println(" " + ASSIGNS + " [OPTIONS] <ENCODED_REGIONNAME/INPUTFILES_FOR_REGIONNAMES>...");
+    writer.println(" " + ASSIGNS + " [OPTIONS] [<ENCODED_REGIONNAME>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -o,--override  override ownership by another procedure");
     writer.println("    -i,--inputFiles  take one or more files of encoded region names");
@@ -542,11 +607,12 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
   }
 
   private static void usageBypass(PrintWriter writer) {
-    writer.println(" " + BYPASS + " [OPTIONS] <PID>...");
+    writer.println(" " + BYPASS + " [OPTIONS] [<PID>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -o,--override   override if procedure is running/stuck");
     writer.println("    -r,--recursive  bypass parent and its children. SLOW! EXPENSIVE!");
     writer.println("    -w,--lockWait   milliseconds to wait before giving up; default=1");
+    writer.println("    -i,--inputFiles  take one or more files of pids");
     writer.println("   Pass one (or more) procedure 'pid's to skip to procedure finish. Parent");
     writer.println("   of bypassed procedure will also be skipped to the finish. Entities will");
     writer.println("   be left in an inconsistent state and will require manual fixup. May");
@@ -554,18 +620,25 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("   procedure has children. Add 'recursive' if all you have is a parent pid");
     writer.println("   to finish parent and children. This is SLOW, and dangerous so use");
     writer.println("   selectively. Does not always work.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains pids, one per line. For example:");
+    writer.println("     $ HBCK2 " + BYPASS + " -i fileName1 fileName2");
   }
 
   private static void usageFilesystem(PrintWriter writer) {
-    writer.println(" " + FILESYSTEM + " [OPTIONS] [<TABLENAME>...]");
+    writer.println(" " + FILESYSTEM + " [OPTIONS] [<TABLENAME>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -f, --fix    sideline corrupt hfiles, bad links, and references.");
+    writer.println("    -i,--inputFiles  take one or more files of table names");
     writer.println("   Report on corrupt hfiles, references, broken links, and integrity.");
     writer.println("   Pass '--fix' to sideline corrupt files and links. '--fix' does NOT");
     writer.println("   fix integrity issues; i.e. 'holes' or 'orphan' regions. Pass one or");
     writer.println("   more tablenames to narrow checkup. Default checks all tables and");
     writer.println("   restores 'hbase.version' if missing. Interacts with the filesystem");
     writer.println("   only! Modified regions need to be reopened to pick-up changes.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains table names, one per line. For example:");
+    writer.println("     $ HBCK2 " + FILESYSTEM + " -i fileName1 fileName2");
   }
 
   private static void usageFixMeta(PrintWriter writer) {
@@ -610,19 +683,24 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
   }
 
   private static void usageReplication(PrintWriter writer) {
-    writer.println(" " + REPLICATION + " [OPTIONS] [<TABLENAME>...]");
+    writer.println(" " + REPLICATION + " [OPTIONS] [<TABLENAME>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -f, --fix    fix any replication issues found.");
+    writer.println("    -i,--inputFiles  take one or more files of table names");
     writer.println("   Looks for undeleted replication queues and deletes them if passed the");
     writer.println("   '--fix' option. Pass a table name to check for replication barrier and");
     writer.println("   purge if '--fix'.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains table names, one per line. For example:");
+    writer.println("     $ HBCK2 " + REPLICATION + " -i fileName1 fileName2");
   }
 
   private static void usageExtraRegionsInMeta(PrintWriter writer) {
-    writer.println(" " + EXTRA_REGIONS_IN_META + " <NAMESPACE|"
-      + "NAMESPACE:TABLENAME>...");
+    writer.println(" " + EXTRA_REGIONS_IN_META + " [<NAMESPACE|"
+      + "NAMESPACE:TABLENAME>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -f, --fix    fix meta by removing all extra regions found.");
+    writer.println("    -i,--inputFiles  take one or more files of namespace or table names");
     writer.println("   Reports regions present on hbase:meta, but with no related ");
     writer.println("   directories on the file system. Needs hbase:meta to be online. ");
     writer.println("   For each table name passed as parameter, performs diff");
@@ -643,11 +721,17 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("     $ HBCK2 " + EXTRA_REGIONS_IN_META + " default:table_1 ns1");
     writer.println("   Returns list of extra regions for each table passed as parameter, or");
     writer.println("   for each table on namespaces specified as parameter.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line.");
+    writer.println("   For example:");
+    writer.println("     $ HBCK2 " + EXTRA_REGIONS_IN_META + " -i fileName1 fileName2");
   }
 
   private static void usageReportMissingRegionsInMeta(PrintWriter writer) {
-    writer.println(" " + REPORT_MISSING_REGIONS_IN_META + " <NAMESPACE|"
-        + "NAMESPACE:TABLENAME>...");
+    writer.println(" " + REPORT_MISSING_REGIONS_IN_META + " [<NAMESPACE|"
+        + "NAMESPACE:TABLENAME>...|-i <INPUT_FILE>...]");
+    writer.println("   Options:");
+    writer.println("    -i,--inputFiles  take one or more files of encoded region names");
     writer.println("   To be used when regions missing from hbase:meta but directories");
     writer.println("   are present still in HDFS. Can happen if user has run _hbck1_");
     writer.println("   'OfflineMetaRepair' against an hbase-2.x cluster. This is a CHECK only");
@@ -673,10 +757,18 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("     $ HBCK2 reportMissingRegionsInMeta default:table_1 ns1");
     writer.println("   Returns list of missing regions for each table passed as parameter, or");
     writer.println("   for each table on namespaces specified as parameter.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains <NAMESPACE|NAMESPACE:TABLENAME>, one per line.);" +
+            "For example:");
+    writer.println("     $ HBCK2 " + REPORT_MISSING_REGIONS_IN_META + " -i fileName1 fileName2");
   }
 
   private static void usageSetRegionState(PrintWriter writer) {
-    writer.println(" " + SET_REGION_STATE + " <ENCODED_REGIONNAME> <STATE>");
+    writer.println(" " + SET_REGION_STATE + " [<ENCODED_REGIONNAME> <STATE>"
+            +"|-i <INPUT_FILE>...]");
+    writer.println("   Options:");
+    writer.println("    -i,--inputFiles  take one or more files of encoded region names " +
+                    "and states");
     writer.println("   To set the replica region's state, it needs the primary region's ");
     writer.println("   encoded regionname and replica id. The command will be ");
     writer.println(" " + SET_REGION_STATE + " <PRIMARY_ENCODED_REGIONNAME>,<replicaId> <STATE>");
@@ -696,10 +788,16 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("   setting region 'de00010733901a05f5a2a3a382e27dd4' to CLOSING:");
     writer.println("     $ HBCK2 setRegionState de00010733901a05f5a2a3a382e27dd4 CLOSING");
     writer.println("   Returns \"0\" if region state changed and \"1\" otherwise.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains <ENCODED_REGIONNAME> <STATE>, one pair per line. " +
+            "For example:");
+    writer.println("     $ HBCK2 " + SET_REGION_STATE + " -i fileName1 fileName2");
   }
 
   private static void usageSetTableState(PrintWriter writer) {
-    writer.println(" " + SET_TABLE_STATE + " <TABLENAME> <STATE>");
+    writer.println(" " + SET_TABLE_STATE + " [<TABLENAME> <STATE>|-i <INPUT_FILE>...]");
+    writer.println("   Options:");
+    writer.println("    -i,--inputFiles  take one or more files of table names and states");
     writer.println("   Possible table states: " + Arrays.stream(TableState.State.values()).
         map(Enum::toString).collect(Collectors.joining(", ")));
     writer.println("   To read current table state, in the hbase shell run:");
@@ -709,10 +807,16 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("   An example making table name 'user' ENABLED:");
     writer.println("     $ HBCK2 setTableState users ENABLED");
     writer.println("   Returns whatever the previous table state was.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains <TABLENAME> <STATE>, one pair per line. " +
+            "For example:");
+    writer.println("     $ HBCK2 " + SET_TABLE_STATE + " -i fileName1 fileName2");
   }
 
   private static void usageScheduleRecoveries(PrintWriter writer) {
-    writer.println(" " + SCHEDULE_RECOVERIES + " <SERVERNAME>...");
+    writer.println(" " + SCHEDULE_RECOVERIES + " [<SERVERNAME>...|-i <INPUT_FILE>...]");
+    writer.println("   Options:");
+    writer.println("    -i,--inputFiles  take one or more files of server names");
     writer.println("   Schedule ServerCrashProcedure(SCP) for list of RegionServers. Format");
     writer.println("   server name as '<HOSTNAME>,<PORT>,<STARTCODE>' (See HBase UI/logs).");
     writer.println("   Example using RegionServer 'a.example.org,29100,1540348649479':");
@@ -720,6 +824,9 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println("   Returns the pid(s) of the created ServerCrashProcedure(s) or -1 if");
     writer.println("   no procedure created (see master logs for why not).");
     writer.println("   Command support added in hbase versions 2.0.3, 2.1.2, 2.2.0 or newer.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains server names, one per line. For example:");
+    writer.println("     $ HBCK2 " + SCHEDULE_RECOVERIES + " -i fileName1 fileName2");
   }
 
   private static void usageRecoverUnknown(PrintWriter writer) {
@@ -733,9 +840,10 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
   }
 
   private static void usageUnassigns(PrintWriter writer) {
-    writer.println(" " + UNASSIGNS + " <ENCODED_REGIONNAME>...");
+    writer.println(" " + UNASSIGNS + " [<ENCODED_REGIONNAME>...|-i <INPUT_FILE>...]");
     writer.println("   Options:");
     writer.println("    -o,--override  override ownership by another procedure");
+    writer.println("    -i,--inputFiles  take one or more files of encoded region names");
     writer.println("   A 'raw' unassign that can be used even during Master initialization");
     writer.println("   (if the -skip flag is specified). Skirts Coprocessors. Pass one or");
     writer.println("   more encoded region names. 1588230740 is the hard-coded name for the");
@@ -746,6 +854,9 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     writer.println();
     writer.println("   SEE ALSO, org.apache.hbase.hbck1.OfflineMetaRepair, the offline");
     writer.println("   hbase:meta tool. See the HBCK2 README for how to use.");
+    writer.println("   If -i or --inputFiles is specified, pass one or more input file names.");
+    writer.println("   Each file contains encoded region names, one per line. For example:");
+    writer.println("     $ HBCK2 " + UNASSIGNS + " -i fileName1 fileName2");
   }
 
   private static void usageRegioninfoMismatch(PrintWriter writer) {
@@ -892,14 +1003,14 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
       // Case handlers all have same format. Check first that the server supports
       // the feature FIRST, then move to process the command.
       case SET_TABLE_STATE:
-        if (commands.length < 3) {
-          showErrorMessage(command + " takes tablename and state arguments: e.g. user ENABLED");
+        if (commands.length < 2) {
+          showErrorMessage(command +
+            " takes tablename and state arguments: e.g. user ENABLED, or a list of input files");
           return EXIT_FAILURE;
         }
         try (ClusterConnection connection = connect(); Hbck hbck = connection.getHbck()) {
           checkFunctionSupported(connection, command);
-          System.out.println(setTableState(hbck, TableName.valueOf(commands[1]),
-              TableState.State.valueOf(commands[2])));
+          setTableState(hbck, purgeFirst(commands));
         }
         break;
 
@@ -944,52 +1055,34 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
         break;
 
       case SET_REGION_STATE:
-        if (commands.length < 3) {
+        if (commands.length < 2) {
           showErrorMessage(command + " takes region encoded name and state arguments: e.g. "
-              + "35f30b0ce922c34bf5c284eff33ba8b3 CLOSING");
+              + "35f30b0ce922c34bf5c284eff33ba8b3 CLOSING, or a list of input files");
           return EXIT_FAILURE;
         }
-        RegionState.State state = RegionState.State.valueOf(commands[2]);
-
-        int replicaId = 0;
-        String region = commands[1];
-        int separatorIndex = commands[1].indexOf(",");
-        if (separatorIndex > 0) {
-          region = commands[1].substring(0, separatorIndex);
-          replicaId = Integer.getInteger(commands[1].substring(separatorIndex + 1));
-        }
-
-        if (replicaId > 0) {
-          System.out.println("Change state for replica reigon " + replicaId  +
-                  " for primary region " + region);
-        }
 
         try (ClusterConnection connection = connect()) {
           checkHBCKSupport(connection, command);
-          return setRegionState(connection, region, replicaId, state);
+          return setRegionState(connection, purgeFirst(commands));
         }
 
       case FILESYSTEM:
         try (ClusterConnection connection = connect()) {
           checkHBCKSupport(connection, command);
           try (FileSystemFsck fsfsck = new FileSystemFsck(getConf())) {
-            if (fsfsck.fsck(purgeFirst(commands)) != 0) {
-              return EXIT_FAILURE;
-            }
+            return fsfsck.fsck(getInputList(purgeFirst(commands))
+                    .toArray(new String[0])) != 0? EXIT_FAILURE : EXIT_SUCCESS;
           }
         }
-        break;
 
       case REPLICATION:
         try (ClusterConnection connection = connect()) {
           checkHBCKSupport(connection, command, "2.1.1", "2.2.0", "3.0.0");
           try (ReplicationFsck replicationFsck = new ReplicationFsck(getConf())) {
-            if (replicationFsck.fsck(purgeFirst(commands)) != 0) {
-              return EXIT_FAILURE;
-            }
+            return replicationFsck.fsck(getInputList(purgeFirst(commands))
+                    .toArray(new String[0])) != 0? EXIT_FAILURE : EXIT_SUCCESS;
           }
         }
-        break;
 
       case SCHEDULE_RECOVERIES:
         if (commands.length < 2) {
@@ -1191,6 +1284,31 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
     return result;
   }
 
+  /**
+   * @return arguments for SET_REGION_STATE command
+   */
+  private String[] formatSetRegionStateCommand(String[] commands) {
+    if (commands.length < 2) {
+      showErrorMessage("setRegionState takes region encoded name and state arguments: e.g. "
+              + "35f30b0ce922c34bf5c284eff33ba8b3 CLOSING");
+      return null;
+    }
+    RegionState.State state = RegionState.State.valueOf(commands[1]);
+    Integer replicaId = 0;
+    String region = commands[0];
+    int separatorIndex = commands[0].indexOf(",");
+    if (separatorIndex > 0) {
+      region = commands[0].substring(0, separatorIndex);
+      replicaId = Integer.parseInt(commands[0].substring(separatorIndex + 1));
+    }
+    if (replicaId > 0) {
+      System.out.println("Change state for replica region " + replicaId +
+              " for primary region " + region);
+    }
+
+    return new String[]{region, replicaId.toString(), state.name()};
+  }
+
   HBCK2(Configuration conf) {
     super(conf);
   }
@@ -1202,4 +1320,70 @@ public class HBCK2 extends Configured implements org.apache.hadoop.util.Tool {
       System.exit(errCode);
     }
   }
+
+  private List<String> stringArrayToList(String... nameSpaceOrTable) {
+    return nameSpaceOrTable != null ? Arrays.asList(nameSpaceOrTable) : null;
+  }
+
+  /**
+   * Get list of input if no other options
+   * @param args Array of arguments
+   * @return the list of input from arguments or parsed from input files
+   */
+  private List<String> getInputList(String[] args) throws IOException {
+    if (args == null) {
+      return null;
+    }
+    Options options = new Options();
+    Option inputFile = Option.builder("i").longOpt("inputFiles").build();
+    options.addOption(inputFile);
+    CommandLine commandLine = getCommandLine(args, options);
+    if (commandLine == null) {
+      return null;
+    }
+    return getFromArgsOrFiles(commandLine.getArgList(),
+            commandLine.hasOption(inputFile.getOpt()));
+  }
+
+  /**
+   * Get a commandLine object with options and a arg list
+   */
+  private CommandLine getCommandLine(String[] args, Options options) {
+    // Parse command-line.
+    CommandLineParser parser = new DefaultParser();
+    CommandLine commandLine;
+    try {
+      commandLine = parser.parse(options, args, false);
+    } catch (ParseException e) {
+      showErrorMessage(e.getMessage());
+      return null;
+    }
+    return commandLine;
+  }
+  /**
+   * @return Read arguments from args or a list of input files
+   */
+  private List<String> getFromArgsOrFiles(List<String> args, boolean getFromFile)
+          throws IOException {
+    if (!getFromFile || args == null) {
+      return args;
+    }
+    return getFromFiles(args);
+  }
+
+  /**
+   * @return Read arguments from a list of input files
+   */
+  private List<String> getFromFiles(List<String> args) throws IOException {
+    List<String> argList = new ArrayList<>();
+    for (String filePath : args) {
+      try (InputStream fileStream = new FileInputStream(filePath)){
+        LineIterator it = IOUtils.lineIterator(fileStream, "UTF-8");
+        while (it.hasNext()) {
+          argList.add(it.nextLine().trim());
+        }
+      }
+    }
+    return argList;
+  }
 }
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
index 840f9be..09f9559 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCK2.java
@@ -19,6 +19,7 @@ package org.apache.hbase;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -73,9 +74,12 @@ public class TestHBCK2 {
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final TableName TABLE_NAME = TableName.valueOf(TestHBCK2.class.getSimpleName());
   private static final TableName REGION_STATES_TABLE_NAME = TableName.
-    valueOf(TestHBCK2.class.getSimpleName() + "-REGIONS_STATES");
+          valueOf(TestHBCK2.class.getSimpleName() + "-REGIONS_STATES");
   private final static String ASSIGNS = "assigns";
   private static final String EXTRA_REGIONS_IN_META = "extraRegionsInMeta";
+  private final static String UNASSIGNS = "unassigns";
+  private final static String SET_REGION_STATE = "setRegionState";
+  private final static String SET_TABLE_STATE = "setTableState";
 
   @Rule
   public TestName testName = new TestName();
@@ -106,7 +110,7 @@ public class TestHBCK2 {
     TEST_UTIL.deleteTable(TABLE_NAME);
   }
 
-  @Test (expected = UnsupportedOperationException.class)
+  @Test(expected = UnsupportedOperationException.class)
   public void testVersions() throws IOException {
     try (ClusterConnection connection = this.hbck2.connect()) {
       this.hbck2.checkHBCKSupport(connection, "test", "10.0.0");
@@ -121,6 +125,43 @@ public class TestHBCK2 {
       // Restore the state.
       state = this.hbck2.setTableState(hbck, TABLE_NAME, state.getState());
       assertTrue("Found=" + state.getState(), state.isDisabled());
+
+      // Test the new method with arg list
+      String[] args = new String[]{TABLE_NAME.getNameAsString(), "DISABLED"};
+      state = this.hbck2.setTableStateByArgs(hbck, args);
+      assertTrue("Found=" + state.getState(), state.isEnabled());
+    }
+  }
+
+  @Test
+  public void testSetTableStateWithInputFiles() throws IOException {
+    File testFile = new File(TEST_UTIL.getDataTestDir().toString(), "inputForSetTableTest");
+    writeStringsToAFile(testFile, new String[]{TABLE_NAME.getNameAsString() + " DISABLED"});
+    String result = testRunWithArgs(new String[]{SET_TABLE_STATE, "-i", testFile.toString()});
+    assertTrue(result.contains("tableName=TestHBCK2, state=ENABLED"));
+
+    // Restore the state.
+    try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
+      TableState state = this.hbck2.setTableState(hbck, TABLE_NAME, TableState.State.ENABLED);
+      assertTrue("Found=" + state.getState(), state.isDisabled());
+    }
+  }
+
+  @Test
+  public void testUnAssigns() throws IOException {
+    try (Admin admin = TEST_UTIL.getConnection().getAdmin()) {
+      List<RegionInfo> regions = admin.getRegions(TABLE_NAME);
+      for (RegionInfo ri : regions) {
+        RegionState rs = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
+                getRegionStates().getRegionState(ri.getEncodedName());
+        LOG.info("RS: {}", rs.toString());
+      }
+      String[] regionStrsArray =
+              regions.stream().map(RegionInfo::getEncodedName).toArray(String[]::new);
+      File testFile = new File(TEST_UTIL.getDataTestDir().toString(), "inputForUnAssignsTest");
+      writeStringsToAFile(testFile, regionStrsArray);
+      String result = testRunWithArgs(new String[]{UNASSIGNS, "-i", testFile.toString()});
+      validateRegionEndState(getPidsFromResult(result), regions, false);
     }
   }
 
@@ -128,19 +169,18 @@ public class TestHBCK2 {
   public void testAssigns() throws IOException {
     try (Admin admin = TEST_UTIL.getConnection().getAdmin()) {
       List<RegionInfo> regions = admin.getRegions(TABLE_NAME);
-      for (RegionInfo ri: regions) {
+      for (RegionInfo ri : regions) {
         RegionState rs = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
-            getRegionStates().getRegionState(ri.getEncodedName());
+                getRegionStates().getRegionState(ri.getEncodedName());
         LOG.info("RS: {}", rs.toString());
       }
-      String [] regionStrsArray  =
-          regions.stream().map(RegionInfo::getEncodedName).toArray(String[]::new);
+      String[] regionStrsArray =
+              regions.stream().map(RegionInfo::getEncodedName).toArray(String[]::new);
 
       try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
         unassigns(regions, regionStrsArray);
         List<Long> pids = this.hbck2.assigns(hbck, regionStrsArray);
-        waitOnPids(pids);
-        validateOpen(regions);
+        validateRegionEndState(pids, regions, true);
         // What happens if crappy region list passed?
         pids = this.hbck2.assigns(hbck, Arrays.stream(new String[]{"a", "some rubbish name"}).
                 collect(Collectors.toList()).toArray(new String[]{}));
@@ -148,23 +188,26 @@ public class TestHBCK2 {
           assertEquals(org.apache.hadoop.hbase.procedure2.Procedure.NO_PROC_ID, pid);
         }
 
-        // test input files
+        // Test input files
         unassigns(regions, regionStrsArray);
         File testFile = new File(TEST_UTIL.getDataTestDir().toString(), "inputForAssignsTest");
-        try (FileOutputStream output = new FileOutputStream(testFile, false)) {
-          for (String regionStr : regionStrsArray) {
-            output.write((regionStr + System.lineSeparator()).getBytes());
-          }
-        }
+        writeStringsToAFile(testFile, regionStrsArray);
         String result = testRunWithArgs(new String[]{ASSIGNS, "-i", testFile.toString()});
-        Scanner scanner = new Scanner(result).useDelimiter("[\\D]+");
-        pids = new ArrayList<>();
-        while (scanner.hasNext()) {
-          pids.add(scanner.nextLong());
+        validateRegionEndState(getPidsFromResult(result), regions, true);
+
+        // Test multiple input files
+        unassigns(regions, regionStrsArray);
+        List<String> params = new ArrayList<>();
+        params.add(ASSIGNS);
+        params.add("-i");
+        for (String regionStr : regionStrsArray) {
+          File tempTestFile = new File(TEST_UTIL.getDataTestDir().toString(),
+                  "inputForAssignsTest-" + regionStr);
+          writeStringsToAFile(tempTestFile, new String[]{regionStr});
+          params.add(tempTestFile.toString());
         }
-        scanner.close();
-        waitOnPids(pids);
-        validateOpen(regions);
+        result = testRunWithArgs(params.toArray(new String[]{}));
+        validateRegionEndState(getPidsFromResult(result), regions, true);
       }
     }
   }
@@ -186,6 +229,49 @@ public class TestHBCK2 {
     }
   }
 
+  @Test
+  public void testSetRegionStateWithArgsList() throws IOException {
+    TEST_UTIL.createTable(REGION_STATES_TABLE_NAME, Bytes.toBytes("family1"));
+    try (Admin admin = TEST_UTIL.getConnection().getAdmin()) {
+      List<RegionInfo> regions = admin.getRegions(REGION_STATES_TABLE_NAME);
+      RegionInfo info = regions.get(0);
+      assertEquals(RegionState.State.OPEN, getCurrentRegionState(info));
+      String region = info.getEncodedName();
+      String[] args = new String[]{region, "0", "CLOSING"};
+      try (ClusterConnection connection = this.hbck2.connect()) {
+        this.hbck2.setRegionStateByArgs(connection, args);
+      }
+      assertEquals(RegionState.State.CLOSING, getCurrentRegionState(info));
+    } finally {
+      TEST_UTIL.deleteTable(REGION_STATES_TABLE_NAME);
+    }
+  }
+
+  @Test
+  public void testSetRegionStateInputFiles() throws IOException {
+    TEST_UTIL.createTable(REGION_STATES_TABLE_NAME, Bytes.toBytes("family1"));
+    try (Admin admin = TEST_UTIL.getConnection().getAdmin()) {
+      List<RegionInfo> regions = admin.getRegions(REGION_STATES_TABLE_NAME);
+      String[] input = new String[regions.size()];
+      for (int i = 0; i < regions.size(); i++) {
+        RegionInfo info = regions.get(i);
+        assertEquals(RegionState.State.OPEN, getCurrentRegionState(info));
+        String region = info.getEncodedName();
+        input[i] = region + " CLOSING";
+      }
+
+      File testFile = new File(TEST_UTIL.getDataTestDir().toString(), "inputForSetRegionStateTest");
+      writeStringsToAFile(testFile, input);
+      testRunWithArgs(new String[]{SET_REGION_STATE, "-i", testFile.toString()});
+
+      for (RegionInfo info : regions) {
+        assertEquals(RegionState.State.CLOSING, getCurrentRegionState(info));
+      }
+    } finally {
+      TEST_UTIL.deleteTable(REGION_STATES_TABLE_NAME);
+    }
+  }
+
   @Test
   public void testSetReplicaRegionState() throws IOException, InterruptedException {
     TEST_UTIL.createTable(REGION_STATES_TABLE_NAME, Bytes.toBytes("family1"));
@@ -214,11 +300,11 @@ public class TestHBCK2 {
   public void testSetRegionStateInvalidRegion() throws IOException {
     try (ClusterConnection connection = this.hbck2.connect()) {
       assertEquals(HBCK2.EXIT_FAILURE, this.hbck2.setRegionState(connection, "NO_REGION",
-          RegionState.State.CLOSING));
+              RegionState.State.CLOSING));
     }
   }
 
-  @Test (expected = IllegalArgumentException.class)
+  @Test(expected = IllegalArgumentException.class)
   public void testSetRegionStateInvalidState() throws IOException {
     TEST_UTIL.createTable(REGION_STATES_TABLE_NAME, Bytes.toBytes("family1"));
     try (Admin admin = TEST_UTIL.getConnection().getAdmin()) {
@@ -236,40 +322,40 @@ public class TestHBCK2 {
 
   @Test
   public void testAddMissingRegionsInMetaAllRegionsMissing() throws Exception {
-    this.testAddMissingRegionsInMetaForTables(5,5);
+    this.testAddMissingRegionsInMetaForTables(5, 5);
   }
 
   @Test
   public void testAddMissingRegionsInMetaTwoMissingOnly() throws Exception {
-    this.testAddMissingRegionsInMetaForTables(2,5);
+    this.testAddMissingRegionsInMetaForTables(2, 5);
   }
 
   @Test
   public void testReportMissingRegionsInMetaAllNsTbls() throws Exception {
     String[] nullArgs = null;
     this.testReportMissingRegionsInMeta(5, 5,
-      nullArgs);
+            nullArgs);
   }
 
   @Test
   public void testReportMissingRegionsInMetaSpecificTbl() throws Exception {
     this.testReportMissingRegionsInMeta(5, 5,
-      TABLE_NAME.getNameWithNamespaceInclAsString());
+            TABLE_NAME.getNameWithNamespaceInclAsString());
   }
 
   @Test
   public void testReportMissingRegionsInMetaSpecificTblAndNsTbl() throws Exception {
     this.testReportMissingRegionsInMeta(5, 5,
-      TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
+            TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
   }
 
   @Test
   public void testReportMissingRegionsInMetaSpecificTblAndNsTblAlsoMissing() throws Exception {
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), TableName.valueOf("hbase:namespace"));
+            .getTableRegions(TEST_UTIL.getConnection(), TableName.valueOf("hbase:namespace"));
     HBCKMetaTableAccessor.deleteRegionInfo(TEST_UTIL.getConnection(), regions.get(0));
     this.testReportMissingRegionsInMeta(5, 6,
-      TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
+            TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
   }
 
   @Test
@@ -287,7 +373,7 @@ public class TestHBCK2 {
   public void testFormatReportMissingInMetaOneMissing() throws IOException {
     TableName tableName = createTestTable(5);
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     HBCKMetaTableAccessor.deleteRegionInfo(TEST_UTIL.getConnection(), regions.get(0));
     String expectedResult = "Missing Regions for each table:\n";
     String result = testFormatMissingRegionsInMetaReport();
@@ -295,13 +381,31 @@ public class TestHBCK2 {
     assertTrue(result.contains(expectedResult));
     //validates our test table region is reported missing
     expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t"
-      + regions.get(0).getEncodedName();
+            + regions.get(0).getEncodedName();
     assertTrue(result.contains(expectedResult));
     //validates namespace region is not reported missing
     expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
     assertTrue(result.contains(expectedResult));
   }
 
+  private void writeStringsToAFile(File testFile, String[] strs) throws IOException {
+    try (FileOutputStream output = new FileOutputStream(testFile, false)) {
+      for (String regionStr : strs) {
+        output.write((regionStr + System.lineSeparator()).getBytes());
+      }
+    }
+  }
+
+  private List<Long> getPidsFromResult(String result) {
+    List<Long> pids = new ArrayList<>();
+    try (Scanner scanner = new Scanner(result).useDelimiter("[\\D]+")) {
+      while (scanner.hasNext()) {
+        pids.add(scanner.nextLong());
+      }
+    }
+    return pids;
+  }
+
   private void unassigns(List<RegionInfo> regions, String[] regionStrsArray) throws IOException {
     try (ClusterConnection connection = this.hbck2.connect(); Hbck hbck = connection.getHbck()) {
       List<Long> pids = this.hbck2.unassigns(hbck, regionStrsArray);
@@ -316,23 +420,25 @@ public class TestHBCK2 {
   }
 
 
-  private void validateOpen(List<RegionInfo> regions) {
+  private void validateRegionEndState(List<Long> pids, List<RegionInfo> regions, boolean open) {
+    waitOnPids(pids);
     for (RegionInfo ri : regions) {
       RegionState rs = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().
               getRegionStates().getRegionState(ri.getEncodedName());
       LOG.info("RS: {}", rs.toString());
-      assertTrue(rs.toString(), rs.isOpened());
+      assertTrue(rs.toString(), open ? rs.isOpened() : rs.isClosed());
     }
   }
 
   private String testFormatMissingRegionsInMetaReport()
-      throws IOException {
+          throws IOException {
     HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
     final StringBuilder builder = new StringBuilder();
     PrintStream originalOS = System.out;
     OutputStream testOS = new OutputStream() {
-      @Override public void write(int b) {
-        builder.append((char)b);
+      @Override
+      public void write(int b) {
+        builder.append((char) b);
       }
     };
     System.setOut(new PrintStream(testOS));
@@ -349,18 +455,18 @@ public class TestHBCK2 {
   }
 
   private void testAddMissingRegionsInMetaForTables(int missingRegions, int totalRegions)
-    throws Exception {
+          throws Exception {
     TableName tableName = createTestTable(totalRegions);
     HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     Connection connection = TEST_UTIL.getConnection();
     regions.subList(0, missingRegions).forEach(r -> deleteRegionInfo(connection, r));
     int remaining = totalRegions - missingRegions;
     assertEquals("Table should have " + remaining + " regions in META.", remaining,
-      HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
+            HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
     List<Future<List<String>>> result = hbck.addMissingRegionsInMetaForTables("default:" +
-      tableName.getNameAsString());
+            tableName.getNameAsString());
 
     Integer total = result.stream().map(f -> {
       try {
@@ -372,46 +478,46 @@ public class TestHBCK2 {
     }).reduce(0, Integer::sum);
     assertEquals(missingRegions, total.intValue());
     assertEquals("Table regions should had been re-added in META.", totalRegions,
-      HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
+            HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
     //compare the added regions to make sure those are the same
     List<RegionInfo> newRegions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     assertEquals("All re-added regions should be the same", regions, newRegions);
   }
 
   private void testReportMissingRegionsInMeta(int missingRegionsInTestTbl,
-      int expectedTotalMissingRegions, String... namespaceOrTable) throws Exception {
+          int expectedTotalMissingRegions, String... namespaceOrTable) throws Exception {
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), TABLE_NAME);
+            .getTableRegions(TEST_UTIL.getConnection(), TABLE_NAME);
     Connection connection = TEST_UTIL.getConnection();
     regions.subList(0, missingRegionsInTestTbl).forEach(r -> deleteRegionInfo(connection, r));
     HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
-    final Map<TableName,List<Path>> report =
-      hbck.reportTablesWithMissingRegionsInMeta(namespaceOrTable);
+    final Map<TableName, List<Path>> report =
+            hbck.reportTablesWithMissingRegionsInMeta(namespaceOrTable);
     long resultingMissingRegions = report.keySet().stream().mapToLong(nsTbl ->
-      report.get(nsTbl).size()).sum();
+            report.get(nsTbl).size()).sum();
     assertEquals(expectedTotalMissingRegions, resultingMissingRegions);
     String[] nullArgs = null;
     hbck.addMissingRegionsInMetaForTables(nullArgs);
   }
 
-  @Test (expected = IllegalArgumentException.class)
+  @Test(expected = IllegalArgumentException.class)
   public void testSetRegionStateInvalidRegionAndInvalidState() throws IOException {
     try (ClusterConnection connection = this.hbck2.connect()) {
       this.hbck2.setRegionState(connection, "NO_REGION", null);
     }
   }
 
-  private RegionState.State getCurrentRegionState(RegionInfo regionInfo) throws IOException{
+  private RegionState.State getCurrentRegionState(RegionInfo regionInfo) throws IOException {
     Table metaTable = TEST_UTIL.getConnection().getTable(TableName.valueOf("hbase:meta"));
     Get get = new Get(regionInfo.getRegionName());
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.STATE_QUALIFIER);
     Result result = metaTable.get(get);
     byte[] currentStateValue = result.getValue(HConstants.CATALOG_FAMILY,
-      HConstants.STATE_QUALIFIER);
+            HConstants.STATE_QUALIFIER);
     return currentStateValue != null ?
-      RegionState.State.valueOf(Bytes.toString(currentStateValue))
-      : null;
+            RegionState.State.valueOf(Bytes.toString(currentStateValue))
+            : null;
   }
 
   private RegionState.State getCurrentRegionState(RegionInfo primary, int replicaId)
@@ -428,9 +534,9 @@ public class TestHBCK2 {
   }
 
   private void waitOnPids(List<Long> pids) {
-    for (Long pid: pids) {
+    for (Long pid : pids) {
       while (!TEST_UTIL.getHBaseCluster().getMaster().getMasterProcedureExecutor().
-          isFinished(pid)) {
+              isFinished(pid)) {
         Threads.sleep(100);
       }
     }
@@ -456,37 +562,37 @@ public class TestHBCK2 {
 
   @Test
   public void testRemoveExtraRegionsInMetaTwoExtras() throws Exception {
-    this.testRemoveExtraRegionsInMetaForTables(2,5);
+    this.testRemoveExtraRegionsInMetaForTables(2, 5);
   }
 
   @Test
   public void testReportExtraRegionsInMetaAllNsTbls() throws Exception {
     String[] nullArgs = null;
     this.testReportExtraRegionsInMeta(5, 5,
-      nullArgs);
+            nullArgs);
   }
 
   @Test
   public void testReportExtraRegionsInMetaSpecificTbl() throws Exception {
     this.testReportExtraRegionsInMeta(5, 5,
-      TABLE_NAME.getNameWithNamespaceInclAsString());
+            TABLE_NAME.getNameWithNamespaceInclAsString());
   }
 
   @Test
   public void testReportExtraRegionsInMetaSpecificTblAndNsTbl() throws Exception {
     this.testReportExtraRegionsInMeta(5, 5,
-      TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
+            TABLE_NAME.getNameWithNamespaceInclAsString(), "hbase:namespace");
   }
 
   @Test
   public void testReportExtraRegionsInMetaSpecificTblAndNsTblAlsoExtra() throws Exception {
     TableName tableName = createTestTable(5);
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     deleteRegionDir(tableName, regions.get(0).getEncodedName());
     this.testReportExtraRegionsInMeta(5, 6,
-      TABLE_NAME.getNameWithNamespaceInclAsString(),
-      tableName.getNameWithNamespaceInclAsString());
+            TABLE_NAME.getNameWithNamespaceInclAsString(),
+            tableName.getNameWithNamespaceInclAsString());
   }
 
   @Test
@@ -504,7 +610,7 @@ public class TestHBCK2 {
   public void testFormatReportExtraInMetaOneExtra() throws IOException {
     TableName tableName = createTestTable(5);
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     deleteRegionDir(tableName, regions.get(0).getEncodedName());
     String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
     String result = testFormatExtraRegionsInMetaReport();
@@ -512,7 +618,7 @@ public class TestHBCK2 {
     assertTrue(result.contains(expectedResult));
     //validates our test table region is reported as extra
     expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t"
-      + regions.get(0).getEncodedName();
+            + regions.get(0).getEncodedName();
     assertTrue(result.contains(expectedResult));
     //validates namespace region is not reported missing
     expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
@@ -533,7 +639,7 @@ public class TestHBCK2 {
   @Test
   public void testFormatFixExtraRegionsInMetaNoExtraSpecifyTable() throws IOException {
     final String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n"
-      + "\thbase:namespace -> No mismatching regions. This table is good!\n\t";
+            + "\thbase:namespace -> No mismatching regions. This table is good!\n\t";
     String result = testFormatExtraRegionsInMetaFix("hbase:namespace");
     assertTrue(result.contains(expectedResult));
   }
@@ -542,7 +648,7 @@ public class TestHBCK2 {
   public void testFormatFixExtraInMetaOneExtra() throws IOException {
     TableName tableName = createTestTable(5);
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     deleteRegionDir(tableName, regions.get(0).getEncodedName());
     String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
     String result = testFormatExtraRegionsInMetaFix(null);
@@ -550,7 +656,7 @@ public class TestHBCK2 {
     assertTrue(result.contains(expectedResult));
     //validates our test table region is reported as extra
     expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t"
-      + regions.get(0).getEncodedName();
+            + regions.get(0).getEncodedName();
     assertTrue(result.contains(expectedResult));
     //validates namespace region is not reported missing
     expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
@@ -561,7 +667,7 @@ public class TestHBCK2 {
   public void testFormatFixExtraInMetaOneExtraSpecificTable() throws IOException {
     TableName tableName = createTestTable(5);
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     deleteRegionDir(tableName, regions.get(0).getEncodedName());
     String expectedResult = "Regions in Meta but having no equivalent dir, for each table:\n";
     String result = testFormatExtraRegionsInMetaFix(tableName.getNameWithNamespaceInclAsString());
@@ -569,7 +675,7 @@ public class TestHBCK2 {
     assertTrue(result.contains(expectedResult));
     //validates our test table region is reported as extra
     expectedResult = "\t" + tableName.getNameAsString() + "->\n\t\t"
-      + regions.get(0).getEncodedName();
+            + regions.get(0).getEncodedName();
     assertTrue(result.contains(expectedResult));
     //validates namespace region is not reported missing
     expectedResult = "\n\thbase:namespace -> No mismatching regions. This table is good!\n\t";
@@ -583,7 +689,7 @@ public class TestHBCK2 {
     }
   }
 
-  @Test (expected = UnsupportedOperationException.class)
+  @Test(expected = UnsupportedOperationException.class)
   public void testFunctionNotSupported() throws IOException {
     try (ClusterConnection connection = this.hbck2.connect()) {
       this.hbck2.checkFunctionSupported(connection, "test");
@@ -595,10 +701,10 @@ public class TestHBCK2 {
   }
 
   private String testFormatExtraRegionsInMetaFix(String table) throws IOException {
-    if(table!=null) {
-      return testRunWithArgs(new String[] {EXTRA_REGIONS_IN_META, "-f", table});
+    if (table != null) {
+      return testRunWithArgs(new String[]{EXTRA_REGIONS_IN_META, "-f", table});
     } else {
-      return testRunWithArgs(new String[] {EXTRA_REGIONS_IN_META, "-f"});
+      return testRunWithArgs(new String[]{EXTRA_REGIONS_IN_META, "-f"});
     }
   }
 
@@ -607,8 +713,9 @@ public class TestHBCK2 {
     final StringBuilder builder = new StringBuilder();
     PrintStream originalOS = System.out;
     OutputStream testOS = new OutputStream() {
-      @Override public void write(int b) throws IOException {
-        builder.append((char)b);
+      @Override
+      public void write(int b) throws IOException {
+        builder.append((char) b);
       }
     };
     System.setOut(new PrintStream(testOS));
@@ -618,33 +725,42 @@ public class TestHBCK2 {
   }
 
   private void testRemoveExtraRegionsInMetaForTables(int extraRegions, int totalRegions)
-    throws Exception {
+          throws Exception {
     TableName tableName = createTestTable(totalRegions);
     HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), tableName);
+            .getTableRegions(TEST_UTIL.getConnection(), tableName);
     regions.subList(0, extraRegions).forEach(r -> deleteRegionDir(tableName, r.getEncodedName()));
     int remaining = totalRegions - extraRegions;
-    assertEquals(extraRegions, hbck.extraRegionsInMeta(new String[]
-      { "-f",
-        "default:" + tableName.getNameAsString()
-      }).get(tableName).size());
+    assertEquals(extraRegions, hbck.extraRegionsInMeta(new String[]{"-f",
+      "default:" + tableName.getNameAsString()}).get(tableName).size());
     assertEquals("Table regions should had been removed from META.", remaining,
-      HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
+            HBCKMetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
   }
 
   private void testReportExtraRegionsInMeta(int extraRegionsInTestTbl,
-    int expectedTotalExtraRegions, String... namespaceOrTable) throws Exception {
+          int expectedTotalExtraRegions, String... namespaceOrTable) throws Exception {
     List<RegionInfo> regions = HBCKMetaTableAccessor
-      .getTableRegions(TEST_UTIL.getConnection(), TABLE_NAME);
+            .getTableRegions(TEST_UTIL.getConnection(), TABLE_NAME);
     regions.subList(0, extraRegionsInTestTbl).forEach(r -> deleteRegionDir(TABLE_NAME,
-      r.getEncodedName()));
+            r.getEncodedName()));
     HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
-    final Map<TableName,List<String>> report =
-      hbck.extraRegionsInMeta(namespaceOrTable);
+    final Map<TableName, List<String>> report =
+            hbck.extraRegionsInMeta(namespaceOrTable);
     long resultingExtraRegions = report.keySet().stream().mapToLong(nsTbl ->
-      report.get(nsTbl).size()).sum();
+            report.get(nsTbl).size()).sum();
     assertEquals(expectedTotalExtraRegions, resultingExtraRegions);
   }
 
+  @Test
+  public void testByPassWithInputFiles() throws IOException {
+    File testFile = new File(TEST_UTIL.getDataTestDir().toString(), "inputForSetRegionStateTest");
+    writeStringsToAFile(testFile, new String[]{"100568", "200568"});
+    List<Boolean> result = this.hbck2.bypass(new String[]{"-i", testFile.toString()});
+    assertNotNull(result);
+    assertEquals(result.size(), 2);
+    for (boolean rs : result) {
+      assertFalse(rs);
+    }
+  }
 }
diff --git a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCKCommandLineParsing.java b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCKCommandLineParsing.java
index da37648..8cd2976 100644
--- a/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCKCommandLineParsing.java
+++ b/hbase-hbck2/src/test/java/org/apache/hbase/TestHBCKCommandLineParsing.java
@@ -21,20 +21,42 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
-
 /**
  * Does command-line parsing tests. No clusters.
  * @see TestHBCK2 for cluster-tests.
  */
 public class TestHBCKCommandLineParsing {
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  /**
+   * A 'connected' hbck2 instance.
+   */
+  private HBCK2 hbck2;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    TEST_UTIL.startMiniCluster(3);
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Before
+  public void before() throws Exception {
+    this.hbck2 = new HBCK2(TEST_UTIL.getConfiguration());
+  }
 
   @Test
   public void testHelp() throws IOException {
@@ -61,16 +83,21 @@ public class TestHBCKCommandLineParsing {
 
   @Test (expected=NumberFormatException.class)
   public void testCommandWithOptions() throws IOException {
-    HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
     // The 'x' below should cause the NumberFormatException. The Options should all be good.
-    hbck.run(new String[]{"bypass", "--lockWait=3", "--override", "--recursive", "x"});
+    this.hbck2.run(new String[]{"bypass", "--lockWait=3", "--override", "--recursive", "x"});
+  }
+
+  @Test (expected=FileNotFoundException.class)
+  public void testInputFileOption() throws IOException {
+    // The 'x' below should cause the io exception for file not found.
+    // The Options should all be good.
+    this.hbck2.run(new String[]{"bypass", "--override", "--inputFile", "x"});
   }
 
   @Test (expected=IllegalArgumentException.class)
   public void testSetRegionStateCommandInvalidState() throws IOException {
-    HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
-    // The 'x' below should cause the IllegalArgumentException. The Options should all be good.
-    hbck.run(new String[]{"setRegionState", "region_encoded", "INVALID_STATE"});
+    // The 'INVALID_STATE' below should cause the IllegalArgumentException.
+    this.hbck2.run(new String[]{"setRegionState", "region_encoded", "INVALID_STATE"});
   }
 
   @Test
@@ -90,11 +117,10 @@ public class TestHBCKCommandLineParsing {
     PrintStream stream = new PrintStream(os);
     PrintStream oldOut = System.out;
     System.setOut(stream);
-    HBCK2 hbck = new HBCK2(TEST_UTIL.getConfiguration());
     if (option != null) {
-      hbck.run(new String[] { option });
+      this.hbck2.run(new String[] { option });
     } else {
-      hbck.run(null);
+      this.hbck2.run(null);
     }
     stream.close();
     os.close();