Posted to commits@ambari.apache.org by dm...@apache.org on 2014/02/20 22:40:22 UTC

[1/3] git commit: AMBARI-4705. Alerts for Falcon (Eugene Chekanskiy via dlysnichenko)

Repository: ambari
Updated Branches:
  refs/heads/trunk d291fb456 -> 093ee03f1


AMBARI-4705. Alerts for Falcon (Eugene Chekanskiy via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9182ccab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9182ccab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9182ccab

Branch: refs/heads/trunk
Commit: 9182ccab00e174435a02f7e625300625a276b78f
Parents: d291fb4
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Feb 20 23:35:53 2014 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Feb 20 23:35:53 2014 +0200

----------------------------------------------------------------------
 .../NAGIOS/package/files/check_webui.sh         |  9 +++++++-
 .../services/NAGIOS/package/scripts/params.py   |  3 +++
 .../templates/hadoop-servicegroups.cfg.j2       |  6 +++++
 .../package/templates/hadoop-services.cfg.j2    | 23 +++++++++++++++++++-
 .../HDP/2.1.1/services/OOZIE/metainfo.xml       |  4 ++++
 .../src/addOns/nagios/scripts/nagios_alerts.php |  1 +
 6 files changed, 44 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
index b23045e..87e6aa6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/files/check_webui.sh
@@ -78,7 +78,14 @@ historyserver2)
       exit 1;
     fi
     ;;
-*) echo "UNKNOWN: Invalid service name [$service], valid options [jobtracker|jobhistory|hbase|namenode|resourcemanager|historyserver2]"
+falconserver)
+    hsweburl="http://$host:$port/"
+    if [[ `checkurl "$hsweburl"` -ne 0 ]]; then
+      echo "WARNING: FalconServer Web UI not accessible : $hsweburl";
+      exit 1;
+    fi
+    ;;
+*) echo "UNKNOWN: Invalid service name [$service], valid options [jobtracker|jobhistory|hbase|namenode|resourcemanager|historyserver2|falconserver]"
    exit 3
    ;;
 esac
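
For reference, the new branch can be exercised by hand. A minimal invocation, assuming check_webui.sh takes the service name, host and port as positional arguments (the Nagios command wrapper that supplies them is not part of this diff, and the host and port below are placeholders):

  ./check_webui.sh falconserver falcon-host.example.com 15000
  # On failure the new branch prints
  #   WARNING: FalconServer Web UI not accessible : http://falcon-host.example.com:15000/
  # and exits 1; any other service name now falls through to the updated UNKNOWN message.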

http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
index c16c038..647fd92 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/scripts/params.py
@@ -65,6 +65,7 @@ flume_port = "4159"
 hive_metastore_port = config['configurations']['global']['hive_metastore_port'] #"9083"
 templeton_port = config['configurations']['webhcat-site']['templeton.port'] #"50111"
 hbase_rs_port = "60030"
+falcon_port = config['configurations']['global']['falcon_port']
 ahs_port = get_port_from_url(config['configurations']['yarn-site']['yarn.ahs.webapp.address'])
 
 # this is different for HDP1
@@ -133,6 +134,7 @@ hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts",None)
 _hive_server_host = default("/clusterHostInfo/hive_server_host",None)
 _oozie_server = default("/clusterHostInfo/oozie_server",None)
 _webhcat_server_host = default("/clusterHostInfo/webhcat_server_host",None)
+_falcon_host = config['clusterHostInfo']['falcon_server_hosts']
 # can differ on HDP1
 #_mapred_tt_hosts = _slave_hosts
 #if hbase_rs_hosts not given it is assumed that region servers on same nodes as slaves
@@ -163,5 +165,6 @@ hostgroup_defs = {
     'nodemanagers' : _nm_hosts,
     'historyserver2' : _hs_host,
     'journalnodes' : _journalnode_hosts,
+    'falcon-server' : _falcon_host,
     'ats-servers' : _app_timeline_server_hosts
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
index 0101ce6..b07dbe8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-servicegroups.cfg.j2
@@ -77,4 +77,10 @@ define servicegroup {
   servicegroup_name  HUE
   alias  HUE Checks
 }
+{% endif %}
+{% if hostgroup_defs['falcon-server'] %}
+define servicegroup {
+  servicegroup_name  FALCON
+  alias  FALCON Checks
+}
 {% endif %}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
index 18387dd..ace9180 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/NAGIOS/package/templates/hadoop-services.cfg.j2
@@ -654,6 +654,27 @@ define service {
 }
 {% endif %}
 
+#FALCON checks
+{% if hostgroup_defs['falcon-server'] %}
+define service {
+        hostgroup_name          falcon-server
+        service_description     FALCON::Falcon Server status
+        servicegroups           FALCON
+        check_command           check_tcp_wrapper!{{ falcon_port }}!-w 1 -c 1
+        normal_check_interval   1
+        retry_check_interval    0.5
+        max_check_attempts      3
+}
+define service {
+        hostgroup_name          falcon-server
+        service_description     FALCON::Falcon Web UI status
+        servicegroups           FALCON
+        check_command           check_webui!falconserver!{{ falcon_port }}
+        normal_check_interval   1
+        retry_check_interval    0.5
+        max_check_attempts      3
+}
+{% endif %}
 {% if hostgroup_defs['ats-servers'] %}
 define service {
         hostgroup_name          ats-servers
@@ -665,4 +686,4 @@ define service {
         retry_check_interval    0.5
         max_check_attempts      3
 }
-{% endif %}
\ No newline at end of file
+{% endif %}
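
Both new checks are guarded by {% if hostgroup_defs['falcon-server'] %}, so they are emitted only when the cluster actually has a Falcon Server host. A minimal sketch of how the templated check_command line renders, using plain Jinja2 and an assumed falcon_port of "15000" (the value is illustrative, not set by this commit):

  # Illustrative only: render the templated check_command line with an assumed port.
  from jinja2 import Template

  line = "check_command           check_tcp_wrapper!{{ falcon_port }}!-w 1 -c 1"
  print(Template(line).render(falcon_port="15000"))
  # -> check_command           check_tcp_wrapper!15000!-w 1 -c 1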

http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/OOZIE/metainfo.xml
index 494533f..9a2f1cc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/OOZIE/metainfo.xml
@@ -44,6 +44,10 @@
               <type>rpm</type>
               <name>falcon</name>
             </package>
+            <package>
+              <type>rpm</type>
+              <name>zip</name>
+            </package>
           </packages>
         </osSpecific>
       </osSpecifics>

http://git-wip-us.apache.org/repos/asf/ambari/blob/9182ccab/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
----------------------------------------------------------------------
diff --git a/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php b/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
index c835f6f..ab4227c 100644
--- a/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
+++ b/contrib/addons/src/addOns/nagios/scripts/nagios_alerts.php
@@ -407,6 +407,7 @@ function hdp_mon_generate_response( $response_data )
       case "OOZIE":
       case "WEBHCAT":
       case "GANGLIA":
+      case "FALCON":
       case "PUPPET":
         break;
       default:


[3/3] git commit: AMBARI-4737. Falcon Server can not be restarted (Eugene Chekanskiy via dlysnichenko)

Posted by dm...@apache.org.
AMBARI-4737. Falcon Server can not be restarted (Eugene Chekanskiy via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/093ee03f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/093ee03f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/093ee03f

Branch: refs/heads/trunk
Commit: 093ee03f14bd17fe9111a39a07bff34dcaca25f6
Parents: 87b4a36
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Feb 20 23:39:13 2014 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Feb 20 23:39:13 2014 +0200

----------------------------------------------------------------------
 .../stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py    | 3 +++
 .../stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py    | 3 +++
 .../src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py     | 3 +++
 3 files changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/093ee03f/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
index da613af..1cedbdf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
@@ -63,3 +63,6 @@ def falcon(type, action = None):
                      '{falcon_home}/bin/falcon-stop'),
               user=params.falcon_user
       )
+      File(params.server_pid_file,
+           action='delete'
+      )
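
The added File resource removes the stale pid file once falcon-stop has run, which is what lets a subsequent start (and therefore a restart) succeed. A minimal Python sketch of the resulting stop path, using the concrete values the unit test below expects (the real script derives them from params and status_params):

  # Sketch of the Falcon Server stop path after this change (values from the test below).
  from resource_management import Execute, File

  def stop_falcon():
      Execute('env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 FALCON_LOG_DIR=/var/log/falcon '
              'FALCON_PID_DIR=/var/run/falcon FALCON_DATA_DIR=/hadoop/falcon/activemq '
              '/usr/lib/falcon/bin/falcon-stop',
              user='falcon')
      # New in this commit: delete the leftover pid file so the next start is not
      # blocked by the old process id.
      File('/var/run/falcon/falcon.pid',
           action='delete')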

http://git-wip-us.apache.org/repos/asf/ambari/blob/093ee03f/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
index 9a6c05e..fa7532d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
@@ -19,6 +19,9 @@ limitations under the License.
 
 from resource_management import *
 
+import status_params
+from status_params import server_pid_file
+
 config = Script.get_config()
 
 oozie_user = config['configurations']['global']['oozie_user']

http://git-wip-us.apache.org/repos/asf/ambari/blob/093ee03f/ambari-server/src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py
index 56f0446..7dc1720 100644
--- a/ambari-server/src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1.1/FALCON/test_falcon_server.py
@@ -56,6 +56,9 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('Execute',
                           'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 FALCON_LOG_DIR=/var/log/falcon FALCON_PID_DIR=/var/run/falcon FALCON_DATA_DIR=/hadoop/falcon/activemq /usr/lib/falcon/bin/falcon-stop',
                           user='falcon', )
+    self.assertResourceCalled('File',
+                              '/var/run/falcon/falcon.pid',
+                              action=['delete'])
     self.assertNoMoreResources()
 
   def test_configure_default(self):


[2/3] git commit: AMBARI-4710. Add unit tests for hooks in secured mode. (Eugene Chekanskiy via dlysnichenko)

Posted by dm...@apache.org.
AMBARI-4710. Add unit tests for hooks in secured mode. (Eugene Chekanskiy via dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/87b4a367
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/87b4a367
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/87b4a367

Branch: refs/heads/trunk
Commit: 87b4a3678b72515a008a9e61478e3dd1eeadc4eb
Parents: 9182cca
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Thu Feb 20 23:37:44 2014 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Thu Feb 20 23:37:44 2014 +0200

----------------------------------------------------------------------
 .../hooks/before-START/test_before_start.py     | 172 +++++++++++++++++-
 .../hooks/before-INSTALL/test_before_install.py |   2 +-
 .../hooks/before-START/test_before_start.py     | 178 ++++++++++++++++++-
 3 files changed, 349 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/87b4a367/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
index d3b7788..d8643d6 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
@@ -23,7 +23,7 @@ from stacks.utils.RMFTestCase import *
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
 class TestHookBeforeStart(RMFTestCase):
-  def test_configure_default(self):
+  def test_hook_default(self):
     self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
@@ -182,3 +182,173 @@ class TestHookBeforeStart(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  def test_hook_secured(self):
+    self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
+                       classname="BeforeConfigureHook",
+                       command="hook",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              path = ['/bin/', '/usr/bin'],
+                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
+                              )
+    self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
+                              content = Template('snmpd.conf.j2'),
+                              )
+    self.assertResourceCalled('Service', 'snmpd',
+                              action = ['restart'],
+                              )
+    self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
+                              only_if = 'test -f /selinux/enforce',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-i386-32; ln -sf /usr/lib/libsnappy.so /usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
+                              )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+                              content = Template('hadoop-env.sh.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
+                              content = Template('commons-logging.properties.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
+                              content = Template('health_check.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('PropertiesFile', '/etc/hadoop/conf/log4j.properties',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0664,
+                              properties = self.getConfig()['configurations']['hdfs-log4j'],
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.driver=.*~ambari.jobhistory.driver=org.postgresql.Driver~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA=.*~log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.driver=.*~log4j.appender.JHA.driver=${ambari.jobhistory.driver}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.database=.*~log4j.appender.JHA.database=${ambari.jobhistory.database}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.logger=.*~ambari.jobhistory.logger=DEBUG,JHA~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.password=.*~log4j.appender.JHA.password=${ambari.jobhistory.password}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.database=.*~ambari.jobhistory.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=${ambari.jobhistory.logger}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.user=.*~log4j.appender.JHA.user=${ambari.jobhistory.user}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.user=.*~ambari.jobhistory.user=mapred~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.password=.*~ambari.jobhistory.password=mapred~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
+                              content = Template('hadoop-metrics2.properties.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
+                              content = StaticFile('task-log4j.properties'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
+                              to = '/usr/lib/hadoop/hadoop-tools.jar',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertNoMoreResources()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/87b4a367/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
index 76136e8..2d0754c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
@@ -22,7 +22,7 @@ from stacks.utils.RMFTestCase import *
 
 
 class TestHookBeforeInstall(RMFTestCase):
-  def test_configure_default(self):
+  def test_hook_default(self):
     self.executeScript("2.0.6/hooks/before-INSTALL/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",

http://git-wip-us.apache.org/repos/asf/ambari/blob/87b4a367/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index 6045f39..159ba3e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -23,7 +23,7 @@ from stacks.utils.RMFTestCase import *
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
 class TestHookBeforeStart(RMFTestCase):
-  def test_configure_default(self):
+  def test_hook_default(self):
     self.executeScript("2.0.6/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
@@ -189,3 +189,179 @@ class TestHookBeforeStart(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  def test_hook_secured(self):
+    self.executeScript("2.0.6/hooks/before-START/scripts/hook.py",
+                       classname="BeforeConfigureHook",
+                       command="hook",
+                       config_file="secured.json"
+    )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              path = ['/bin/', '/usr/bin'],
+                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
+                              )
+    self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
+                              content = Template('snmpd.conf.j2'),
+                              )
+    self.assertResourceCalled('Service', 'snmpd',
+                              action = ['restart'],
+                              )
+    self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
+                              only_if = 'test -f /selinux/enforce',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-i386-32; ln -sf /usr/lib/libsnappy.so /usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so',
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
+                              )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/log/hadoop',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/var/run/hadoop',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('Directory', '/tmp',
+                              owner = 'hdfs',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+                              content = Template('hadoop-env.sh.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
+                              content = Template('commons-logging.properties.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
+                              content = Template('health_check-v2.j2'),
+                              owner = 'root',
+                              )
+    self.assertResourceCalled('PropertiesFile', '/etc/hadoop/conf/log4j.properties',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0664,
+                              properties = self.getConfig()['configurations']['hdfs-log4j'],
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.driver=.*~###ambari.jobhistory.driver=o~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA=.*~###log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.driver=.*~###log4j.appender.JHA.driver=${ambari.jobhistory.driver}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.database=.*~###log4j.appender.JHA.database=${ambari.jobhistory.database}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.logger=.*~###ambari.jobhistory.logger=DEBUG,JHA~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.password=.*~###log4j.appender.JHA.password=${ambari.jobhistory.password}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.database=.*~###ambari.jobhistory.database=j~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~###log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=.*~###log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=${ambari.jobhistory.logger}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?log4j.appender.JHA.user=.*~###log4j.appender.JHA.user=${ambari.jobhistory.user}~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.user=.*~###ambari.jobhistory.user=m~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('Execute', "sed -i 's~\\(###\\)\\?ambari.jobhistory.password=.*~###ambari.jobhistory.password=m~' /etc/hadoop/conf/log4j.properties",
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
+                              content = Template('hadoop-metrics2.properties.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
+                              content = StaticFile('task-log4j.properties'),
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertNoMoreResources()
\ No newline at end of file