Posted to commits@hbase.apache.org by el...@apache.org on 2019/06/03 20:52:34 UTC

[hbase] branch branch-1 updated: HBASE-22396 Backport HBASE-21536 to branch-1

This is an automated email from the ASF dual-hosted git repository.

elserj pushed a commit to branch branch-1
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-1 by this push:
     new e1bca19  HBASE-22396 Backport HBASE-21536 to branch-1
e1bca19 is described below

commit e1bca196318b9c846872866efe1fd1114dbd74bb
Author: Artem Ervits <ge...@gmail.com>
AuthorDate: Wed May 22 15:44:06 2019 -0400

    HBASE-22396 Backport HBASE-21536 to branch-1
    
    Fix completebulkload usage instructions
    
    Signed-off-by: Josh Elser <el...@apache.org>
---
 bin/hbase                                          | 47 ++++++++++++----------
 .../hbase/mapreduce/LoadIncrementalHFiles.java     | 14 ++++---
 2 files changed, 33 insertions(+), 28 deletions(-)

diff --git a/bin/hbase b/bin/hbase
index 4877e47..f82acb6 100755
--- a/bin/hbase
+++ b/bin/hbase
@@ -84,28 +84,29 @@ if [ $# = 0 ]; then
   echo ""
   echo "Commands:"
   echo "Some commands take arguments. Pass no args or -h for usage."
-  echo "  shell           Run the HBase shell"
-  echo "  hbck            Run the hbase 'fsck' tool"
-  echo "  snapshot        Tool for managing snapshots"
-  echo "  snapshotinfo    Tool for dumping snapshot information"
-  echo "  wal             Write-ahead-log analyzer"
-  echo "  hfile           Store file analyzer"
-  echo "  zkcli           Run the ZooKeeper shell"
-  echo "  upgrade         Upgrade hbase"
-  echo "  master          Run an HBase HMaster node" 
-  echo "  regionserver    Run an HBase HRegionServer node" 
-  echo "  zookeeper       Run a Zookeeper server"
-  echo "  rest            Run an HBase REST server" 
-  echo "  thrift          Run the HBase Thrift server" 
-  echo "  thrift2         Run the HBase Thrift2 server"
-  echo "  clean           Run the HBase clean up script"
-  echo "  classpath       Dump hbase CLASSPATH"
-  echo "  mapredcp        Dump CLASSPATH entries required by mapreduce"
-  echo "  pe              Run PerformanceEvaluation"
-  echo "  ltt             Run LoadTestTool"
-  echo "  canary          Run the Canary tool"
-  echo "  version         Print the version"
-  echo "  CLASSNAME       Run the class named CLASSNAME"
+  echo "  shell            Run the HBase shell"
+  echo "  hbck             Run the hbase 'fsck' tool"
+  echo "  snapshot         Tool for managing snapshots"
+  echo "  snapshotinfo     Tool for dumping snapshot information"
+  echo "  wal              Write-ahead-log analyzer"
+  echo "  hfile            Store file analyzer"
+  echo "  zkcli            Run the ZooKeeper shell"
+  echo "  upgrade          Upgrade hbase"
+  echo "  master           Run an HBase HMaster node"
+  echo "  regionserver     Run an HBase HRegionServer node"
+  echo "  zookeeper        Run a Zookeeper server"
+  echo "  rest             Run an HBase REST server"
+  echo "  thrift           Run the HBase Thrift server"
+  echo "  thrift2          Run the HBase Thrift2 server"
+  echo "  clean            Run the HBase clean up script"
+  echo "  classpath        Dump hbase CLASSPATH"
+  echo "  mapredcp         Dump CLASSPATH entries required by mapreduce"
+  echo "  completebulkload Run LoadIncrementalHFiles tool"
+  echo "  pe               Run PerformanceEvaluation"
+  echo "  ltt              Run LoadTestTool"
+  echo "  canary           Run the Canary tool"
+  echo "  version          Print the version"
+  echo "  CLASSNAME        Run the class named CLASSNAME"
   exit 1
 fi
 
@@ -403,6 +404,8 @@ elif [ "$COMMAND" = "canary" ] ; then
   HBASE_OPTS="$HBASE_OPTS $HBASE_CANARY_OPTS"
 elif [ "$COMMAND" = "version" ] ; then
   CLASS='org.apache.hadoop.hbase.util.VersionInfo'
+elif [ "$COMMAND" = "completebulkload" ] ; then
+  CLASS='org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles'
 else
   CLASS=$COMMAND
 fi
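
For context, the bin/hbase change above only maps the new command name onto the existing
LoadIncrementalHFiles class; the tool itself is unchanged. A rough sketch of the two
equivalent invocations (the output directory and table name below are made up for
illustration):

  # before this patch, the tool had to be invoked by its class name
  bin/hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles /tmp/hfile-output mytable
  # with this patch, the same tool is reachable via the new named command
  bin/hbase completebulkload /tmp/hfile-output mytable
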
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 9d7d80b..2945214 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -168,12 +168,14 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }
 
   private void usage() {
-    System.err.println("usage: " + NAME + " /path/to/hfileoutputformat-output tablename -loadTable"
-        + "\n -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by "
-        + "this tool\n  Note: if you set this to 'no', then the target table must already exist "
-        + "in HBase\n -loadTable implies your baseDirectory to store file has a depth of 3 ,you"
-        + " must have an existing table"
-        + "\n");
+    System.err.println("usage: " + "bin/hbase completebulkload <-Dargs> "
+      + "</path/to/hfileoutputformat-output> <tablename>\n"
+      + "\t-D" + CREATE_TABLE_CONF_KEY + "=no can be used to avoid creation "
+      + "of a table by this tool.\n"
+      + "\t Note: if you set this to 'no', then target table must already exist.\n"
+      + "\t-loadTable switch implies your baseDirectory to store file has a "
+      + "depth of 3, table must exist\n"
+      + "\t and -loadTable switch is the last option on the command line.\n\n");
   }
 
   private static interface BulkHFileVisitor<TFamily> {
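
Putting the corrected usage text into practice, invocations might look like the sketch
below. The output path and table name are hypothetical, and this assumes
CREATE_TABLE_CONF_KEY resolves to the usual "create.table" property; -D options go before
the positional arguments and -loadTable, if used, goes last, per the new usage string.

  # load into a table that must already exist (table creation skipped)
  bin/hbase completebulkload -Dcreate.table=no /tmp/hfile-output mytable
  # -loadTable variant: the base directory is expected to contain per-table subdirectories
  bin/hbase completebulkload /tmp/hfile-output mytable -loadTable
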