Posted to commits@ambari.apache.org by ja...@apache.org on 2014/09/05 23:54:17 UTC

git commit: AMBARI-7181. Ambari cluster should support Nagios without HDFS. (jaimin)

Repository: ambari
Updated Branches:
  refs/heads/trunk 1ef1fe475 -> e76fa9ab3


AMBARI-7181. Ambari cluster should support Nagios without HDFS. (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e76fa9ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e76fa9ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e76fa9ab

Branch: refs/heads/trunk
Commit: e76fa9ab3306dd6d72099bbe1074867ffb3cede4
Parents: 1ef1fe4
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Fri Sep 5 14:53:46 2014 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Fri Sep 5 14:53:52 2014 -0700

----------------------------------------------------------------------
 .../services/NAGIOS/package/scripts/params.py   | 56 +++++++++++---------
 1 file changed, 30 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
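
This patch makes the Nagios params.py tolerate clusters that have no HDFS:
the NameNode host list is read up front, HDFS-specific ports and checkpoint
settings are only resolved when a NameNode is actually present, and the
HDFS-vs-HCFS (glusterfs) test is moved up next to that lookup.

Below is a minimal, self-contained sketch of the guard pattern the patch
introduces. The `default` and `get_port_from_url` helpers and the `config`
dict are simplified stand-ins for the resource_management versions used by
the real script, not the actual Ambari API.

    # Hypothetical harness: in Ambari, `config`, `default` and
    # `get_port_from_url` come from resource_management.
    def default(path, default_value, config):
        # Walk a "/a/b/c" style path through nested dicts, falling back
        # to default_value when any key is missing.
        node = config
        for key in path.strip('/').split('/'):
            if not isinstance(node, dict) or key not in node:
                return default_value
            node = node[key]
        return node

    def get_port_from_url(url):
        # Return the port from "host:port" or "scheme://host:port/...".
        hostport = url.split('//')[-1].split('/')[0]
        return hostport.split(':')[-1]

    config = {
        'clusterHostInfo': {},                  # no namenode_host: HCFS/glusterfs cluster
        'configurations': {'hdfs-site': {}},
    }

    namenode_host = default('/clusterHostInfo/namenode_host', None, config)
    has_namenode = namenode_host is not None
    ishdfs_value = "HDFS" if has_namenode else None

    if has_namenode:
        hdfs_site = config['configurations']['hdfs-site']
        namenode_port = get_port_from_url(
            hdfs_site.get('dfs.namenode.http-address', '0.0.0.0:50070'))
        snamenode_port = get_port_from_url(
            hdfs_site.get('dfs.namenode.secondary.http-address', '0.0.0.0:50071'))
    # When has_namenode is False, no HDFS port is computed at all, so Nagios
    # configuration no longer assumes HDFS is installed.

The same `if has_namenode:` guard also wraps the checkpoint period/txns
defaults further down in params.py, as shown in the diff below.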


http://git-wip-us.apache.org/repos/asf/ambari/blob/e76fa9ab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
index 87c6d3e..ec6c885 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
@@ -88,21 +88,31 @@ nagios_principal_name = default("/configurations/nagios-env/nagios_principal_nam
 hadoop_ssl_enabled = False
 
 oozie_server_port = get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
+namenode_host = default("/clusterHostInfo/namenode_host", None)
 
-# different to HDP1    
-if 'dfs.namenode.http-address' in config['configurations']['hdfs-site']:
-  namenode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.namenode.http-address'])
+# - test for HDFS or HCFS (glusterfs)
+if 'namenode_host' in config['clusterHostInfo']:
+  ishdfs_value = "HDFS"
 else:
-  namenode_port = "50070" 
+  ishdfs_value = None
 
-if 'dfs.namenode.secondary.http-address' in config['configurations']['hdfs-site']:
-  snamenode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.namenode.secondary.http-address'])
-else:
-  snamenode_port = "50071"
+has_namenode = not namenode_host == None
+
+# different to HDP1
+if has_namenode:
+  if 'dfs.namenode.http-address' in config['configurations']['hdfs-site']:
+    namenode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.namenode.http-address'])
+  else:
+    namenode_port = "50070"
+
+  if 'dfs.namenode.secondary.http-address' in config['configurations']['hdfs-site']:
+    snamenode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.namenode.secondary.http-address'])
+  else:
+    snamenode_port = "50071"
 
-if 'dfs.journalnode.http-address' in config['configurations']['hdfs-site']:
-  journalnode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.journalnode.http-address'])
-  datanode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.datanode.http.address'])
+  if 'dfs.journalnode.http-address' in config['configurations']['hdfs-site']:
+    journalnode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.journalnode.http-address'])
+    datanode_port = get_port_from_url(config['configurations']['hdfs-site']['dfs.datanode.http.address'])
 
 hbase_master_rpc_port = default('/configurations/hbase-site/hbase.master.port', "60000")
 rm_port = get_port_from_url(config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'])
@@ -124,15 +134,16 @@ ahs_port = get_port_from_url(config['configurations']['yarn-site']['yarn.timelin
 
 # use sensible defaults for checkpoint as they are required by Nagios and 
 # may not be part of hdfs-site.xml on an upgrade
-if 'dfs.namenode.checkpoint.period' in config['configurations']['hdfs-site']:
-  dfs_namenode_checkpoint_period = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.period']
-else:
-  dfs_namenode_checkpoint_period = '21600'
+if has_namenode:
+  if 'dfs.namenode.checkpoint.period' in config['configurations']['hdfs-site']:
+    dfs_namenode_checkpoint_period = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.period']
+  else:
+    dfs_namenode_checkpoint_period = '21600'
   
-if 'dfs.namenode.checkpoint.txns' in config['configurations']['hdfs-site']:
-  dfs_namenode_checkpoint_txns = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.txns']
-else:
-  dfs_namenode_checkpoint_txns = '1000000'
+  if 'dfs.namenode.checkpoint.txns' in config['configurations']['hdfs-site']:
+    dfs_namenode_checkpoint_txns = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.txns']
+  else:
+    dfs_namenode_checkpoint_txns = '1000000'
 
 # this is different for HDP1
 nn_metrics_property = "FSNamesystem"
@@ -205,13 +216,6 @@ nagios_web_password = config['configurations']['nagios-env']['nagios_web_passwor
 user_group = config['configurations']['cluster-env']['user_group']
 nagios_contact = config['configurations']['nagios-env']['nagios_contact']
 
-# - test for HDFS or HCFS (glusterfs)
-if 'namenode_host' in config['clusterHostInfo']:
-  namenode_host = default("/clusterHostInfo/namenode_host", None)
-  ishdfs_value = "HDFS"
-else:
-  namenode_host = None
-  ishdfs_value = None 
 
 _snamenode_host = default("/clusterHostInfo/snamenode_host", None)
 _jtnode_host = default("/clusterHostInfo/jtnode_host", None)