Posted to commits@hawq.apache.org by od...@apache.org on 2016/05/03 02:33:19 UTC

[03/13] incubator-hawq git commit: HAWQ-668. Fix hawq check run without HADOOP_HOME

HAWQ-668. Fix hawq check run without HADOOP_HOME


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/95197ad7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/95197ad7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/95197ad7

Branch: refs/heads/HAWQ-703
Commit: 95197ad7edd64fba5ad210c0944c4fc99ba90438
Parents: e74109b
Author: rlei <rl...@pivotal.io>
Authored: Wed Apr 20 00:08:05 2016 +0800
Committer: Ruilong Huo <rh...@pivotal.io>
Committed: Wed Apr 20 10:54:15 2016 +0800

----------------------------------------------------------------------
 tools/bin/gpcheck      | 13 ++++++++++---
 tools/doc/gpcheck_help | 44 ++++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 52 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/95197ad7/tools/bin/gpcheck
----------------------------------------------------------------------
diff --git a/tools/bin/gpcheck b/tools/bin/gpcheck
index 1d0019c..6099867 100755
--- a/tools/bin/gpcheck
+++ b/tools/bin/gpcheck
@@ -736,6 +736,9 @@ def testHAWQconfig(host):
     if hawq is None:
         return # skip HAWQ test when hawq is None
 
+    if hdfs is None:
+        return # skip HAWQ test when hdfs is None
+
     if options.verbose:
         logger.info("-- test HAWQ config")
 
@@ -743,7 +746,9 @@ def testHAWQconfig(host):
         checkFailed(host.hostname, "collect HAWQ configuration error: %s" % hawq.errormsg)
         return
 
-    datanode_list = parse_host_list_file("%s/etc/hadoop/slaves" % HADOOP_HOME)
+    datanode_list = list()
+    if HADOOP_HOME:
+        datanode_list = parse_host_list_file("%s/etc/hadoop/slaves" % HADOOP_HOME)
     is_datanode = False
     if host.hostname in datanode_list:
         is_datanode = True
@@ -819,7 +824,7 @@ def testHAWQconfig(host):
             else:
                 checkFailed(host.hostname, "HAWQ configuration dfs.domain.socket.path: %s, does not exist on %s" % (actual_config['dfs.domain.socket.path'], host.hostname))
 
-    if 'output.replace-datanode-on-failure' in actual_config:
+    if 'output.replace-datanode-on-failure' in actual_config and len(datanode_list) > 0:
         if len(datanode_list) < 4:
             if actual_config['output.replace-datanode-on-failure'] == 'true':
                 checkFailed(host.hostname, "HAWQ configuration: output.replace-datanode-on-failure expect false, current is true")
@@ -926,7 +931,9 @@ def testHDFSConfig(host):
 
     # Check if nodemanager directories exist
     directory_check_list = []
-    datanode_list = parse_host_list_file("%s/etc/hadoop/slaves" % HADOOP_HOME)
+    datanode_list = list()
+    if HADOOP_HOME:
+        datanode_list = parse_host_list_file("%s/etc/hadoop/slaves" % HADOOP_HOME)
     is_datanode = False
     if host.hostname in datanode_list:
         is_datanode = True
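
Both hunks above apply the same defensive pattern: when HADOOP_HOME is unset,
gpcheck no longer formats the bogus path "None/etc/hadoop/slaves"; the datanode
list simply stays empty and the datanode-specific checks are skipped. A minimal
Python sketch of that pattern follows. The diff only shows the call sites of
parse_host_list_file, so its body here is an assumption for illustration, not
the implementation in tools/bin/gpcheck.

import os

def parse_host_list_file(path):
    """Return non-empty, non-comment hostnames from a slaves-style file."""
    hosts = []
    try:
        with open(path) as f:
            for line in f:
                name = line.strip()
                if name and not name.startswith('#'):
                    hosts.append(name)
    except IOError:
        # A missing or unreadable slaves file behaves like an empty list.
        pass
    return hosts

def get_datanode_list(hadoop_home):
    # Mirrors the patched logic: without HADOOP_HOME there is nothing to
    # parse, so return an empty list instead of building "None/etc/...".
    if not hadoop_home:
        return []
    return parse_host_list_file("%s/etc/hadoop/slaves" % hadoop_home)

if __name__ == '__main__':
    datanodes = get_datanode_list(os.environ.get('HADOOP_HOME'))
    print("datanode hosts: %s" % datanodes)
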

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/95197ad7/tools/doc/gpcheck_help
----------------------------------------------------------------------
diff --git a/tools/doc/gpcheck_help b/tools/doc/gpcheck_help
index f57765e..587f32f 100644
--- a/tools/doc/gpcheck_help
+++ b/tools/doc/gpcheck_help
@@ -6,7 +6,8 @@ Verifies and validates HAWQ platform settings.
 SYNOPSIS
 *****************************************************
 
-hawq check -f <hostfile_hawq_check> [--hadoop <hadoop_home>]
+hawq check -f <hostfile_hawq_check>
+        [--hadoop | --hadoop-home <hadoop_home>]
         [--stdout | --zipout] [--config <config_file>]
 
 hawq check --zipin <hawq_check_zipfile>
@@ -68,13 +69,38 @@ OPTIONS
  contain a single host name for all hosts in your HAWQ system 
  (master, standby master, and segments).
 
---hadoop hadoop_home
+--hadoop/--hadoop-home hadoop_home
 
  Use this option to specify your hadoop installation location so that
  hawq check can validate HDFS settings. This option is not needed when
  HADOOP_HOME environment variable is set.
 
 
+--kerberos
+
+ Use this option to indicate that Kerberos is enabled for HDFS/YARN,
+ so that hawq check can validate the HDFS/YARN settings with
+ Kerberos enabled.
+
+
+--hdfs-ha
+
+ Use this option to indicate that HDFS HA mode is enabled, so that
+ hawq check can validate the HDFS settings with HA enabled.
+
+
+--yarn
+
+ Use this option to indicate that HAWQ runs in YARN mode, so that
+ hawq check can validate the HAWQ/YARN settings.
+
+
+--yarn-ha
+
+ Use this option to indicate that HAWQ runs in YARN HA mode, so that
+ hawq check can validate the HAWQ/YARN settings with YARN HA enabled.
+
+
 --stdout
 
  Display collected host information from hawq check. No checks or 
@@ -115,6 +141,20 @@ entering a host file and specifying the hadoop location:
  # hawq check -f hostfile_hawq_check --hadoop ~/hadoop-2.0.0/
 
 
+Verify and validate the HAWQ platform settings with HDFS HA,
+YARN HA, and Kerberos enabled:
+
+ # hawq check -f hostfile_hawq_check --hadoop ~/hadoop-2.0.0/
+              --hdfs-ha --yarn-ha --kerberos
+
+
+Verify and validate the HAWQ platform settings with HDFS HA
+and Kerberos enabled:
+
+ # hawq check -f hostfile_hawq_check --hadoop ~/hadoop-2.0.0/
+              --hdfs-ha --kerberos
+
+
 Save HAWQ platform settings to a zip file, when HADOOP_HOME
 environment variable is set:
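
The --hadoop/--hadoop-home option and the HADOOP_HOME fallback documented
above suggest a simple resolution order. Below is a minimal standalone sketch
of that behavior, assuming an argparse-style parser; gpcheck's actual option
handling is not part of this diff, so the parser and names here are
illustrative only.

import os
import argparse

def resolve_hadoop_home(argv=None):
    # Sketch only: --hadoop/--hadoop-home wins, otherwise fall back to the
    # HADOOP_HOME environment variable.
    parser = argparse.ArgumentParser(prog='hawq check (sketch)')
    parser.add_argument('--hadoop', '--hadoop-home', dest='hadoop_home',
                        help='Hadoop installation location')
    args, _ = parser.parse_known_args(argv)
    return args.hadoop_home or os.environ.get('HADOOP_HOME')

if __name__ == '__main__':
    hadoop_home = resolve_hadoop_home()
    if hadoop_home:
        print("validating HDFS settings under %s" % hadoop_home)
    else:
        # With neither source available, HDFS-related checks are skipped
        # rather than failing, matching the fix in this commit.
        print("HADOOP_HOME not set and --hadoop not given; "
              "skipping HDFS checks")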