Posted to commits@ambari.apache.org by ao...@apache.org on 2014/04/16 17:21:42 UTC

[1/3] AMBARI-4885. Refactor temporary shared resource for HDFS and MR initialization (Ivan Kozlov via aonishuk)

Repository: ambari
Updated Branches:
  refs/heads/trunk 6fa376aea -> b00d45e54
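
For readers skimming the hunks below: a minimal, illustrative sketch (not part of the patch) of the RMFTestCase pattern these tests follow. The class name TestExample and the script path, classname, command, and config_file values are placeholders; executeScript, assertResourceCalled, and assertNoMoreResources are the harness calls visible throughout the diffs.

from stacks.utils.RMFTestCase import *

class TestExample(RMFTestCase):

  def test_configure_default(self):
    # Replay a stack script the same way the tests below do; the
    # arguments here are placeholders, not values from this commit.
    self.executeScript("2.0.6/services/HDFS/package/scripts/namenode.py",
                       classname="NameNode",
                       command="configure",
                       config_file="default.json")
    # Assert, in declaration order, each resource the script created.
    # This File assertion mirrors the shared hdfs.conf resource that
    # this patch moves from the before-START hook tests into the
    # per-component tests.
    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
                              content = Template('hdfs.conf.j2'),
                              owner = 'root',
                              group = 'root',
                              mode = 0644,
                              )
    # Fail if the script declared any resource not asserted above.
    self.assertNoMoreResources()
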


http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 9c659ea..4cfcfdd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -307,7 +307,7 @@ class TestNamenode(RMFTestCase):
                        command = "start",
                        config_file="ha_secured.json"
     )
-    self.assert_configure_default()
+    self.assert_configure_secured()
     self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
                               owner = 'hdfs',
                               content = Template('exclude_hosts_list.j2'),
@@ -370,6 +370,22 @@ class TestNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -378,9 +394,25 @@ class TestNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
                               recursive = True,
                               mode = 0755,
-                              )
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index fc96be5..61aa7ad 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -152,6 +152,22 @@ class TestSNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -160,9 +176,25 @@ class TestSNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              )
+                              )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index d33e063..12e25a1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -29,6 +29,22 @@ class TestZkfc(RMFTestCase):
                        command = "start",
                        config_file="ha_default.json"
     )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                               owner = 'hdfs',
                               recursive = True,
@@ -39,8 +55,8 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                               action = ['delete'],
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               ignore_failures = True,
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
@@ -84,6 +100,22 @@ class TestZkfc(RMFTestCase):
                        command = "start",
                        config_file="ha_secured.json"
     )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/run/hadoop/hdfs',
                               owner = 'hdfs',
                               recursive = True,
@@ -94,8 +126,8 @@ class TestZkfc(RMFTestCase):
                               )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid',
                               action = ['delete'],
+                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               ignore_failures = True,
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', 'ulimit -c unlimited;  export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py
new file mode 100644
index 0000000..00c24d4
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/_test_hive_service_check.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+import datetime, sys, socket
+import  resource_management.libraries.functions
+@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
+@patch("socket.socket", new = MagicMock())
+class TestServiceCheck(RMFTestCase):
+
+  @patch("sys.exit")
+  def test_service_check_default(self, sys_exit_mock):
+
+    self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
+                        classname="HiveServiceCheck",
+                        command="service_check",
+                        config_file="default.json"
+    )
+    self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
+                        content = StaticFile('hcatSmoke.sh'),
+                        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 sh /tmp/hcatSmoke.sh hcatsmoke prepare',
+                        logoutput = True,
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        tries = 3,
+                        user = 'ambari-qa',
+                        try_sleep = 5,
+    )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
+                        logoutput = True,
+                        user = 'hdfs',
+                        conf_dir = '/etc/hadoop/conf',
+                        keytab=UnknownConfigurationMock(),
+                        kinit_path_local='/usr/bin/kinit',
+                        security_enabled=False
+    )
+    self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
+                        logoutput = True,
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        tries = 3,
+                        user = 'ambari-qa',
+                        try_sleep = 5,
+    )
+    self.assertNoMoreResources()
+
+  @patch("sys.exit")
+  def test_service_check_secured(self, sys_exit_mock):
+
+    self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
+                        classname="HiveServiceCheck",
+                        command="service_check",
+                        config_file="secured.json"
+    )
+    self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
+                        content = StaticFile('hcatSmoke.sh'),
+                        mode = 0755,
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 sh /tmp/hcatSmoke.sh hcatsmoke prepare',
+                        logoutput = True,
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        tries = 3,
+                        user = 'ambari-qa',
+                        try_sleep = 5,
+    )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
+                        logoutput = True,
+                        user = 'hdfs',
+                        conf_dir = '/etc/hadoop/conf',
+                        keytab='/etc/security/keytabs/hdfs.headless.keytab',
+                        kinit_path_local='/usr/bin/kinit',
+                        security_enabled=True
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
+                        logoutput = True,
+                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
+                        tries = 3,
+                        user = 'ambari-qa',
+                        try_sleep = 5,
+    )
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
deleted file mode 100644
index 00c24d4..0000000
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-import datetime, sys, socket
-import  resource_management.libraries.functions
-@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
-@patch("socket.socket", new = MagicMock())
-class TestServiceCheck(RMFTestCase):
-
-  @patch("sys.exit")
-  def test_service_check_default(self, sys_exit_mock):
-
-    self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
-                        classname="HiveServiceCheck",
-                        command="service_check",
-                        config_file="default.json"
-    )
-    self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
-                        content = StaticFile('hcatSmoke.sh'),
-                        mode = 0755,
-    )
-    self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 sh /tmp/hcatSmoke.sh hcatsmoke prepare',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
-    )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-                        logoutput = True,
-                        user = 'hdfs',
-                        conf_dir = '/etc/hadoop/conf',
-                        keytab=UnknownConfigurationMock(),
-                        kinit_path_local='/usr/bin/kinit',
-                        security_enabled=False
-    )
-    self.assertResourceCalled('Execute', 'sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
-    )
-    self.assertNoMoreResources()
-
-  @patch("sys.exit")
-  def test_service_check_secured(self, sys_exit_mock):
-
-    self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
-                        classname="HiveServiceCheck",
-                        command="service_check",
-                        config_file="secured.json"
-    )
-    self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
-                        content = StaticFile('hcatSmoke.sh'),
-                        mode = 0755,
-    )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 sh /tmp/hcatSmoke.sh hcatsmoke prepare',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
-    )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-                        logoutput = True,
-                        user = 'hdfs',
-                        conf_dir = '/etc/hadoop/conf',
-                        keytab='/etc/security/keytabs/hdfs.headless.keytab',
-                        kinit_path_local='/usr/bin/kinit',
-                        security_enabled=True
-    )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
-                        logoutput = True,
-                        path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
-                        tries = 3,
-                        user = 'ambari-qa',
-                        try_sleep = 5,
-    )
-    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 25525c4..ad13501 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -261,6 +261,22 @@ class TestHistoryServer(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/app-logs',
@@ -413,3 +429,26 @@ class TestHistoryServer(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 95d5b48..3c3745b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -120,6 +120,22 @@ class TestMapReduce2Client(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -219,5 +235,28 @@ class TestMapReduce2Client(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index e966b46..8941333 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -261,6 +261,22 @@ class TestNodeManager(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/app-logs',
@@ -413,4 +429,26 @@ class TestNodeManager(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
-    
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index e5f38b7..f4e9604 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -205,6 +205,22 @@ class TestResourceManager(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
 
   def assert_configure_secured(self):
 
@@ -301,4 +317,27 @@ class TestResourceManager(RMFTestCase):
       content = Template('container-executor.cfg.j2'),
       group = 'hadoop',
       mode = 0644,
-    )
\ No newline at end of file
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 25c72f6..c8f787e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -121,6 +121,22 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -220,6 +236,29 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0644,
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
   def test_restart_client(self):
@@ -321,6 +360,22 @@ class TestYarnClient(RMFTestCase):
       group = 'hadoop',
       mode = 0755,
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )
     self.assertNoMoreResources()
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
index 8559db0..3aa370c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
@@ -28,6 +28,19 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
     self.assertResourceCalled('Group', 'hadoop', )
     self.assertResourceCalled('Group', 'users', )
     self.assertResourceCalled('Group', 'users', )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index 7efb056..9841c3f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -29,19 +29,6 @@ class TestHookBeforeStart(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -68,16 +55,6 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = Template('hadoop-env.sh.j2'),
                               owner = 'hdfs',
@@ -86,10 +63,6 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check-v2.j2'),
                               owner = 'hdfs',
@@ -105,64 +78,30 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
-                              )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
     self.assertResourceCalled('Execute', 'service snmpd start; chkconfig snmpd on',
-                            path = ['/usr/local/bin/:/bin/:/sbin/'],
-                            )
+                              path = ['/usr/local/bin/:/bin/:/sbin/'],
+                              )
     self.assertNoMoreResources()
 
   def test_hook_secured(self):
@@ -171,24 +110,6 @@ class TestHookBeforeStart(RMFTestCase):
                        command="hook",
                        config_file="secured.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              path = ['/bin/', '/usr/bin'],
-                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -215,23 +136,6 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/usr/lib/hadoop/sbin/task-controller',
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 06050,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 0644,
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = Template('hadoop-env.sh.j2'),
                               owner = 'root',
@@ -240,10 +144,6 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('commons-logging.properties.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'root',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check-v2.j2'),
                               owner = 'root',
@@ -259,58 +159,24 @@ class TestHookBeforeStart(RMFTestCase):
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
-                              )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 0a35276..5b84347 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -155,3 +155,19 @@ class TestAppTimelineServer(RMFTestCase):
                               group = 'hadoop',
                               mode = 0755,
                               )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('XmlConfig', 'capacity-scheduler.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['capacity-scheduler'],
+                              )

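The assertResourceCalled sequences in these tests are order-sensitive: RMFTestCase replays the resources the script under test created, in creation order, and assertNoMoreResources() checks the queue is drained. A minimal sketch of that mechanism, assuming a resource_queue filled while the script runs (not Ambari's actual implementation):

    class OrderedResourceAssertions(object):
      # resource_queue is assumed to be populated, in order, while the
      # script under test executes; each assertion consumes the head.
      def assertResourceCalled(self, klass, name, **kwargs):
        self.assertNotEquals(len(self.resource_queue), 0, 'no more resources')
        resource = self.resource_queue.pop(0)
        self.assertEquals((klass, name, kwargs),
                          (resource.klass, resource.name, resource.kwargs))

      def assertNoMoreResources(self):
        self.assertEquals([], self.resource_queue)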

[2/3] AMBARI-4885. Refactor temporary shared resource for HDFS and MR initialization (Ivan Kozlov via aonishuk)

Posted by ao...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hdfs.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hdfs.conf.j2
deleted file mode 100644
index d58a6f5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/hdfs.conf.j2
+++ /dev/null
@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{{hdfs_user}}   - nofile 32768
-{{hdfs_user}}   - nproc  65536

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/slaves.j2
deleted file mode 100644
index 4a9e713..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/slaves.j2
+++ /dev/null
@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in slave_hosts %}
-{{host}}
-{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/taskcontroller.cfg.j2
deleted file mode 100644
index 3d5f4f2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/templates/taskcontroller.cfg.j2
+++ /dev/null
@@ -1,38 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-mapred.local.dir={{mapred_local_dir}}
-mapreduce.tasktracker.group={{mapred_tt_group}}
-hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
index 57fdb35..b925071 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/datanode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_datanode import datanode
+from hdfs import hdfs
 
 
 class DataNode(Script):
@@ -43,7 +44,8 @@ class DataNode(Script):
 
   def configure(self, env):
     import params
-
+    env.set_params(params)
+    hdfs()
     datanode(action="configure")
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
new file mode 100644
index 0000000..06bf583
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hdfs(name=None):
+  import params
+
+  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hdfs.conf.j2")
+  )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if "hadoop-policy" in params.config['configurations']:
+    XmlConfig("hadoop-policy.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['hadoop-policy'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  XmlConfig("hdfs-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['hdfs-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
+
+  File(os.path.join(params.hadoop_conf_dir, 'slaves'),
+       owner=tc_owner,
+       content=Template("slaves.j2")
+  )
\ No newline at end of file

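The tc_owner logic in the new hdfs.py above is what drives the differing test expectations later in this patch: /etc/hadoop/conf/slaves is owned by root on secured clusters and by the HDFS user otherwise. A self-contained sketch of just that rule (security_enabled and hdfs_user stand in for the params module's values):

    def slaves_owner(security_enabled, hdfs_user='hdfs'):
      # secured clusters keep the slaves file root-owned; unsecured
      # clusters hand it to the HDFS service user
      return 'root' if security_enabled else hdfs_user

    assert slaves_owner(False) == 'hdfs'  # assert_configure_default cases
    assert slaves_owner(True) == 'root'   # assert_configure_secured cases
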
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
index 6220326..94edd21 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_client.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from hdfs import hdfs
 from utils import service
 
 
@@ -44,20 +45,8 @@ class HdfsClient(Script):
 
   def config(self, env):
     import params
-
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-    
-    XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-    )
+    hdfs()
+    pass
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
index a707e2e..e833611 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from hdfs import hdfs
 
 
 class JournalNode(Script):
@@ -57,6 +58,8 @@ class JournalNode(Script):
               owner=params.hdfs_user,
               group=params.user_group
     )
+    env.set_params(params)
+    hdfs()
     pass
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
index 2179292..8aa9bd2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_namenode import namenode
+from hdfs import hdfs
 
 
 class NameNode(Script):
@@ -47,6 +48,7 @@ class NameNode(Script):
     import params
 
     env.set_params(params)
+    hdfs()
     namenode(action="configure")
     pass
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index d55488d..a12b3e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -160,3 +160,5 @@ HdfsDirectory = functools.partial(
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
 )
+
+limits_conf_dir = "/etc/security/limits.d"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
index b2a3bd1..62fe2bc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/snamenode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_snamenode import snamenode
+from hdfs import hdfs
 
 
 class SNameNode(Script):
@@ -49,7 +50,7 @@ class SNameNode(Script):
     import params
 
     env.set_params(params)
-
+    hdfs()
     snamenode(action="configure")
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
index f415f24..c95800e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/zkfc_slave.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from hdfs import hdfs
 
 
 class ZkfcSlave(Script):
@@ -48,6 +49,7 @@ class ZkfcSlave(Script):
     )
 
   def configure(self, env):
+    hdfs()
     pass
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2
new file mode 100644
index 0000000..d58a6f5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/hdfs.conf.j2
@@ -0,0 +1,35 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{{hdfs_user}}   - nofile 32768
+{{hdfs_user}}   - nproc  65536

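For reference, with hdfs_user substituted as hdfs, the template above renders to standard limits.d entries (<domain> <type> <item> <value>, where the '-' type raises both the soft and hard limit):

    hdfs   - nofile 32768
    hdfs   - nproc  65536
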
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2
new file mode 100644
index 0000000..4a9e713
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/templates/slaves.j2
@@ -0,0 +1,21 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+{% for host in slave_hosts %}
+{{host}}
+{% endfor %}

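A quick rendering sketch for the slaves.j2 body above, using jinja2 directly (the host names are made up; at deploy time the Template resource performs the equivalent rendering):

    from jinja2 import Environment

    body = '{% for host in slave_hosts %}\n{{ host }}\n{% endfor %}\n'
    env = Environment(trim_blocks=True)  # drop the newline after block tags
    print(env.from_string(body).render(
        slave_hosts=['c6401.ambari.apache.org', 'c6402.ambari.apache.org']))
    # prints one host per line
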
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 64e8f81..ca204f6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -124,3 +124,12 @@ HdfsDirectory = functools.partial(
   kinit_path_local = kinit_path_local
 )
 update_exclude_file_only = config['commandParams']['update_exclude_file_only']
+
+hadoop_bin = "/usr/lib/hadoop/sbin"
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+#taskcontroller.cfg
+
+mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+

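The mapred_tt_group line above reads a possibly-absent config key through the default(path, fallback) helper. An illustrative stand-in for that lookup, assuming a nested-dict config (the real helper resolves '/'-separated paths against the command's configuration in the same general way):

    def default(config, path, fallback):
      # walk a '/'-separated path through nested dicts; fall back on any miss
      node = config
      for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
          return fallback
        node = node[key]
      return node

    config = {'configurations': {'mapred-site': {}}}
    assert default(config,
                   '/configurations/mapred-site/mapreduce.tasktracker.group',
                   'hadoop') == 'hadoop'
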
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
index 4c71566..0ae7f52 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
@@ -22,6 +22,7 @@ Ambari Agent
 
 from resource_management import *
 import sys
+import os
 
 
 def yarn(name = None):
@@ -151,3 +152,79 @@ def yarn(name = None):
     )
 
 
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if params.security_enabled:
+    File(os.path.join(params.hadoop_bin, "task-controller"),
+         owner="root",
+         group=params.mapred_tt_group,
+         mode=06050
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner = tc_owner,
+         mode = tc_mode,
+         group = params.mapred_tt_group,
+         content=Template("taskcontroller.cfg.j2")
+    )
+  else:
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner=tc_owner,
+         content=Template("taskcontroller.cfg.j2")
+    )
+
+  if "mapred-site" in params.config['configurations']:
+    XmlConfig("mapred-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['mapred-site'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+
+  if "mapred-queue-acls" in params.config['configurations']:
+    XmlConfig("mapred-queue-acls.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'mapred-queue-acls'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+  elif os.path.exists(
+    os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
+    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if "capacity-scheduler" in params.config['configurations']:
+    XmlConfig("capacity-scheduler.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'capacity-scheduler'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
+    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )

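The 06050 mode applied to task-controller above packs setuid, setgid and group read/execute into one octal literal: the root-owned binary can launch tasks as other users while remaining inaccessible to anyone outside mapred_tt_group. A small decoding sketch:

    import stat

    mode = 0o6050                                 # same value as the 06050 literal above
    assert mode & stat.S_ISUID                    # runs as the file owner (root)
    assert mode & stat.S_ISGID                    # runs with the file's group
    assert stat.S_IMODE(mode) & 0o070 == 0o050    # group: r-x
    assert stat.S_IMODE(mode) & 0o707 == 0        # owner/other: no access bits
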
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2
new file mode 100644
index 0000000..3d5f4f2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/templates/taskcontroller.cfg.j2
@@ -0,0 +1,38 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements.  See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership.  The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License.  You may obtain a copy of the License at
+# *
+# *     http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+mapred.local.dir={{mapred_local_dir}}
+mapreduce.tasktracker.group={{mapred_tt_group}}
+hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

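For illustration, with mapred_local_dir and mapred_tt_group as set in the YARN params.py earlier in this patch, and assuming illustrative values of hadoop for the tasktracker group, /var/log/hadoop for hdfs_log_dir_prefix and mapred for mapred_user, the template renders to:

    mapred.local.dir=/tmp/hadoop-mapred/mapred/local
    mapreduce.tasktracker.group=hadoop
    hadoop.log.dir=/var/log/hadoop/mapred
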
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
index 6f51a8c..3290865 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
@@ -105,6 +105,7 @@ class TestDatanode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
                               user = 'root',
                               )
+    self.assertNoMoreResources()
 
   def test_stop_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/datanode.py",
@@ -129,8 +130,23 @@ class TestDatanode(RMFTestCase):
                               ignore_failures = True,
                               )
     self.assertNoMoreResources()
-
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -138,19 +154,35 @@ class TestDatanode(RMFTestCase):
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs',
+                              ignore_failures = True,
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0750,
                               recursive = False,
-                              ignore_failures=True,
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
index 6efa948..f60c7a2 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
@@ -203,6 +203,25 @@ class TestNamenode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
+
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -211,6 +230,25 @@ class TestNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
+
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
index 058c19a..7214ae7 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
@@ -75,6 +75,7 @@ class TestSNamenode(RMFTestCase):
                               action = ['delete'],
                               ignore_failures = True,
                               )
+    self.assertNoMoreResources()
 
   def test_configure_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
@@ -104,6 +105,7 @@ class TestSNamenode(RMFTestCase):
                               not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
                               user = 'hdfs',
                               )
+    self.assertNoMoreResources()
 
   def test_stop_secured(self):
     self.executeScript("1.3.2/services/HDFS/package/scripts/snamenode.py",
@@ -127,8 +129,25 @@ class TestSNamenode(RMFTestCase):
                               action = ['delete'],
                               ignore_failures = True,
                               )
+    self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -137,6 +156,22 @@ class TestSNamenode(RMFTestCase):
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
index d581903..dbdbd31 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_client.py
@@ -62,6 +62,16 @@ class TestMapreduceClient(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -99,5 +109,21 @@ class TestMapreduceClient(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
-
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
     self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
index d22f4ba..8944bc3 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_historyserver.py
@@ -198,6 +198,16 @@ class TestHistoryServer(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -285,3 +295,20 @@ class TestHistoryServer(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
index 255ba1a..aeab2ee 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_jobtracker.py
@@ -22,6 +22,7 @@ from stacks.utils.RMFTestCase import *
 
 class TestJobtracker(RMFTestCase):
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_configure_default(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
@@ -31,13 +32,13 @@ class TestJobtracker(RMFTestCase):
     self.assert_configure_default()
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_start_default(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
                        command = "start",
                        config_file="default.json"
       )
-
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker',
                        user = 'mapred',
@@ -98,6 +99,7 @@ class TestJobtracker(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_configure_secured(self):
 
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
@@ -108,13 +110,13 @@ class TestJobtracker(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
+  @patch("os.path.exists", new = MagicMock(return_value=True))
   def test_start_secured(self):
     self.executeScript("1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py",
                        classname = "Jobtracker",
                        command = "start",
                        config_file="secured.json"
     )
-
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start jobtracker',
                        user = 'mapred',
@@ -257,6 +259,32 @@ class TestJobtracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('HdfsDirectory', '/mapred',
@@ -349,3 +377,36 @@ class TestJobtracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
index bab7f71..d95c74a 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/MAPREDUCE/test_mapreduce_tasktracker.py
@@ -37,7 +37,6 @@ class TestTasktracker(RMFTestCase):
                          command = "start",
                          config_file="default.json"
       )
-
     self.assert_configure_default()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker',
                               user = 'mapred',
@@ -80,7 +79,6 @@ class TestTasktracker(RMFTestCase):
                          command = "start",
                          config_file="secured.json"
     )
-
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start tasktracker',
                               user = 'mapred',
@@ -141,6 +139,16 @@ class TestTasktracker(RMFTestCase):
       owner = 'mapred',
       group = 'hadoop',
     )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/var/run/hadoop/mapred',
@@ -170,4 +178,21 @@ class TestTasktracker(RMFTestCase):
     self.assertResourceCalled('File', '/etc/hadoop/conf/mapred.include',
       owner = 'mapred',
       group = 'hadoop',
-    )
\ No newline at end of file
+    )
+    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 06050,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
+                              content = Template('taskcontroller.cfg.j2'),
+                              owner = 'root',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              owner = 'mapred',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['mapred-site'],
+                              )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
new file mode 100644
index 0000000..1cc6f6f
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/after-INSTALL/test_after_install.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+
+
+@patch("os.path.exists", new = MagicMock(return_value=True))
+class TestHookAfterInstall(RMFTestCase):
+  def test_hook_default(self):
+    self.executeScript("1.3.2/hooks/after-INSTALL/scripts/hook.py",
+                       classname="AfterInstallHook",
+                       command="hook",
+                       config_file="default.json"
+    )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
+                              content = Template('hadoop-env.sh.j2'),
+                              owner = 'hdfs',
+                              )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
+    self.assertNoMoreResources()
\ No newline at end of file

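The class-level @patch above makes these hook tests deterministic: os.path.exists is replaced for every test method in the class, so any code path guarded by an existence check (for example the fair-scheduler.xml and ssl-*.xml.example handling in yarn.py earlier in this patch) is exercised regardless of the build machine's filesystem. A minimal standalone illustration:

    from mock.mock import MagicMock, patch
    import os
    import unittest

    @patch('os.path.exists', new = MagicMock(return_value=True))
    class TestGuardedPaths(unittest.TestCase):
      def test_guard_is_taken(self):
        # the real filesystem is never consulted inside test methods
        self.assertTrue(os.path.exists('/etc/hadoop/conf/fair-scheduler.xml'))
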
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
index 3867f48..fb1c134 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
@@ -28,50 +28,81 @@ class TestHookBeforeInstall(RMFTestCase):
                        command="hook",
                        config_file="default.json"
     )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
+                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
+                              path = ['/bin', '/usr/bin/'],
+                              )
+    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
+                              ignore_failures = True,
+                              path = ['/bin', '/usr/bin/'],
+                              )
     self.assertResourceCalled('Group', 'hadoop',)
     self.assertResourceCalled('Group', 'users',)
     self.assertResourceCalled('Group', 'users',)
     self.assertResourceCalled('User', 'ambari-qa',
-                              gid='hadoop',
-                              groups=[u'users'],)
+                              gid = 'hadoop',
+                              groups = [u'users'],
+                              )
     self.assertResourceCalled('File', '/tmp/changeUid.sh',
-                              content=StaticFile('changeToSecureUid.sh'),
-                              mode=0555,)
-    self.assertResourceCalled('Execute',
-                              '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
-                              not_if='test $(id -u ambari-qa) -gt 1000',)
+                              content = StaticFile('changeToSecureUid.sh'),
+                              mode = 0555,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null',
+                              not_if = 'test $(id -u ambari-qa) -gt 1000',
+                              )
     self.assertResourceCalled('User', 'hbase',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('File', '/tmp/changeUid.sh',
-                              content=StaticFile('changeToSecureUid.sh'),
-                              mode=0555,)
-    self.assertResourceCalled('Execute',
-                              '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
-                              not_if='test $(id -u hbase) -gt 1000',)
+                              content = StaticFile('changeToSecureUid.sh'),
+                              mode = 0555,
+                              )
+    self.assertResourceCalled('Execute', '/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null',
+                              not_if = 'test $(id -u hbase) -gt 1000',
+                              )
     self.assertResourceCalled('Group', 'nagios',)
-    self.assertResourceCalled('User', 'nagios', gid='nagios',)
-    self.assertResourceCalled('User', 'oozie', gid='hadoop',)
-    self.assertResourceCalled('User', 'hcat', gid='hadoop',)
-    self.assertResourceCalled('User', 'hcat', gid='hadoop',)
+    self.assertResourceCalled('User', 'nagios',
+                              gid = 'nagios',
+                              )
+    self.assertResourceCalled('User', 'oozie',
+                              gid = 'hadoop',
+                              )
+    self.assertResourceCalled('User', 'hcat',
+                              gid = 'hadoop',
+                              )
+    self.assertResourceCalled('User', 'hcat',
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('User', 'hive',
-                              gid='hadoop',)
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('Group', 'nobody',)
     self.assertResourceCalled('Group', 'nobody',)
     self.assertResourceCalled('User', 'nobody',
-                              gid='hadoop',
-                              groups=[u'nobody'],)
+                              gid = 'hadoop',
+                              groups = [u'nobody'],
+                              )
     self.assertResourceCalled('User', 'nobody',
-                              gid='hadoop',
-                              groups=[u'nobody'],)
+                              gid = 'hadoop',
+                              groups = [u'nobody'],
+                              )
     self.assertResourceCalled('User', 'hdfs',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('User', 'mapred',
-                              gid='hadoop',
-                              groups=[u'hadoop'],)
+                              gid = 'hadoop',
+                              groups = [u'hadoop'],
+                              )
     self.assertResourceCalled('User', 'zookeeper',
-                              gid='hadoop',)
+                              gid = 'hadoop',
+                              )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertResourceCalled('Package', 'net-snmp',)
-    self.assertNoMoreResources()
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
index 1ef57aa..35061f7 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-START/test_before_start.py
@@ -19,29 +19,18 @@ limitations under the License.
 '''
 
 from mock.mock import MagicMock, call, patch
+from resource_management import *
 from stacks.utils.RMFTestCase import *
 
 @patch("os.path.exists", new = MagicMock(return_value=True))
+@patch.object(Hook, "run_custom_hook")
 class TestHookBeforeStart(RMFTestCase):
-  def test_hook_default(self):
+  def test_hook_default(self, mockHook):
     self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
                        config_file="default.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -49,120 +38,55 @@ class TestHookBeforeStart(RMFTestCase):
                               )
     self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
                               group = 'root',
                               recursive = True,
                               )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'hdfs',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = Template('hadoop-env.sh.j2'),
-                              owner = 'hdfs',
-                              )
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True,
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
                               content = Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'hdfs',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check.j2'),
                               owner = 'hdfs',
                               )
-    rca_properties = format('''
-
-log4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender
-log4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca
-log4j.appender.JHA.driver=org.postgresql.Driver
-log4j.appender.JHA.user=mapred
-log4j.appender.JHA.password=mapred
-
-log4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA
-log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
-
-''')
-    self.maxDiff = None
-    self.assertResourceCalled('File',
-                              '/etc/hadoop/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hdfs',
-                              content='log4jproperties\nline2log4jproperties\nline2'+rca_properties
-    )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/log4j.properties',
+                              content = 'log4jproperties\nline2log4jproperties\nline2\n\nlog4j.appender.JHA=org.apache.ambari.log4j.hadoop.mapreduce.jobhistory.JobHistoryAppender\nlog4j.appender.JHA.database=jdbc:postgresql://c6401.ambari.apache.org/ambarirca\nlog4j.appender.JHA.driver=org.postgresql.Driver\nlog4j.appender.JHA.user=mapred\nlog4j.appender.JHA.password=mapred\n\nlog4j.logger.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=DEBUG,JHA\nlog4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true\n\n',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
-      content = Template('hadoop-metrics2.properties.j2'),
-      owner = 'hdfs',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
+                              content = Template('hadoop-metrics2.properties.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-    )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['mapred-site'],
-    )
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
-      content = StaticFile('task-log4j.properties'),
-      mode = 0755,
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-    )
+                              content = StaticFile('task-log4j.properties'),
+                              mode = 0755,
+                              )
     self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
-      to = '/usr/lib/hadoop/hadoop-tools.jar',
-    )
+                              to = '/usr/lib/hadoop/hadoop-tools.jar',
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
-      owner = 'hdfs',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
       owner = 'hdfs',
       group = 'hadoop',
     )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-      owner = 'mapred',
-      group = 'hadoop',
-    )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
@@ -171,30 +95,12 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               )
     self.assertNoMoreResources()
 
-  def test_hook_secured(self):
+  def test_hook_secured(self, mockHook):
     self.executeScript("1.3.2/hooks/before-START/scripts/hook.py",
                        classname="BeforeConfigureHook",
                        command="hook",
                        config_file="secured.json"
     )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/ ; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//jdk-7u45-linux-x64.tar.gz -o /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /usr/jdk64 ; cd /usr/jdk64 ; tar -xf /tmp/HDP-artifacts//jdk-7u45-linux-x64.tar.gz > /dev/null 2>&1',
-                              not_if = 'test -e /usr/jdk64/jdk1.7.0_45/bin/java',
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'mkdir -p /tmp/HDP-artifacts/; curl -kf --retry 10 http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              not_if = 'test -e /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              ignore_failures = True,
-                              path = ['/bin', '/usr/bin/'],
-                              )
-    self.assertResourceCalled('Execute', 'rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              path = ['/bin/', '/usr/bin'],
-                              only_if = 'test -e /usr/jdk64/jdk1.7.0_45/jre/lib/security && test -f /tmp/HDP-artifacts//UnlimitedJCEPolicyJDK7.zip',
-                              cwd = '/usr/jdk64/jdk1.7.0_45/jre/lib/security',
-                              )
     self.assertResourceCalled('Execute', '/bin/echo 0 > /selinux/enforce',
                               only_if = 'test -f /selinux/enforce',
                               )
@@ -202,11 +108,6 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               )
     self.assertResourceCalled('Execute', 'mkdir -p /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so',
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-                              owner = 'root',
-                              group = 'root',
-                              recursive = True,
-                              )
     self.assertResourceCalled('Directory', '/var/log/hadoop',
                               owner = 'root',
                               group = 'root',
@@ -217,75 +118,30 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               group = 'root',
                               recursive = True,
                               )
-    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
-                              content = Template('hdfs.conf.j2'),
-                              owner = 'root',
-                              group = 'root',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/usr/lib/hadoop/bin/task-controller',
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 06050,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/taskcontroller.cfg',
-                              content = Template('taskcontroller.cfg.j2'),
-                              owner = 'root',
-                              group = 'hadoop',
-                              mode = 0644,
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
-                              content = Template('hadoop-env.sh.j2'),
-                              owner = 'root',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
-                              content = Template('commons-logging.properties.j2'),
-                              owner = 'root',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
-                              content = Template('slaves.j2'),
-                              owner = 'root',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
                               content = Template('health_check.j2'),
                               owner = 'root',
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/log4j.properties',
+                              content = 'log4jproperties\nline2',
                               owner = 'hdfs',
                               group = 'hadoop',
                               mode = 0644,
-                              content = 'log4jproperties\nline2'
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = Template('hadoop-metrics2.properties.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/mapred-queue-acls.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
                               conf_dir = '/etc/hadoop/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               )
-    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['mapred-site'],
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
                               content = StaticFile('task-log4j.properties'),
                               mode = 0755,
                               )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              )
     self.assertResourceCalled('Link', '/usr/lib/hadoop/lib/hadoop-tools.jar',
                               to = '/usr/lib/hadoop/hadoop-tools.jar',
                               )
@@ -293,27 +149,14 @@ log4j.additivity.org.apache.hadoop.mapred.JobHistory$JobHistoryLogger=true
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/fair-scheduler.xml',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
                               owner = 'hdfs',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-client.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/ssl-server.xml.example',
-                              owner = 'mapred',
-                              group = 'hadoop',
-                              )
     self.assertResourceCalled('File', '/etc/snmp/snmpd.conf',
                               content = Template('snmpd.conf.j2'),
                               )
     self.assertResourceCalled('Execute', 'service snmpd start; chkconfig snmpd on',
                               path = ['/usr/local/bin/:/bin/:/sbin/'],
-                             )
-    self.assertNoMoreResources()
-
+                              )
+    self.assertNoMoreResources()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 4ee2571..ddded15 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -152,6 +152,22 @@ class TestDatanode(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -160,13 +176,29 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )
 
   def assert_configure_secured(self):
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -175,8 +207,8 @@ class TestDatanode(RMFTestCase):
                               )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/data',
                               owner = 'hdfs',
+                              ignore_failures = True,
                               group = 'hadoop',
                               mode = 0755,
                               recursive = True,
-                              ignore_failures=True,
                               )

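The datanode hunks above (and the matching namenode/journalnode ones) assert the same three resources at the top of every configure step: the limits file, hdfs-site.xml, and the slaves file. Per the diffstat, these now come from the new shared HDFS/package/scripts/hdfs.py. A speculative sketch of such a shared function, inferred only from the resources the tests expect (the name and signature are assumptions, not the commit's actual hdfs.py):

  from resource_management import *

  def hdfs(name=None):  # hypothetical signature
    import params
    # Owned by root in both the default and the secured tests.
    File("/etc/security/limits.d/hdfs.conf",
         content=Template("hdfs.conf.j2"),
         owner="root",
         group="root",
         mode=0644)
    XmlConfig("hdfs-site.xml",
              conf_dir=params.hadoop_conf_dir,
              configurations=params.config['configurations']['hdfs-site'],
              owner=params.hdfs_user,
              group=params.user_group)
    # The secured tests expect root ownership for slaves; the default
    # tests expect the hdfs user.
    File("/etc/hadoop/conf/slaves",
         content=Template("slaves.j2"),
         owner="root" if params.security_enabled else params.hdfs_user)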
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 380f0f8..3f19eb2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -157,7 +157,22 @@ class TestJournalnode(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
-
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'hdfs',
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -165,3 +180,19 @@ class TestJournalnode(RMFTestCase):
                               group = 'hadoop',
                               recursive = True,
                               )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hdfs.conf',
+                              content = Template('hdfs.conf.j2'),
+                              owner = 'root',
+                              group = 'root',
+                              mode = 0644,
+                              )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+                              content = Template('slaves.j2'),
+                              owner = 'root',
+                              )
\ No newline at end of file


[3/3] git commit: AMBARI-4885. Refactor temporary shared resource for HDFS and MR initialization (Ivan Kozlov via aonishuk)

Posted by ao...@apache.org.
AMBARI-4885. Refactor temporary shared resource for HDFS and MR initialization (Ivan Kozlov via aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b00d45e5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b00d45e5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b00d45e5

Branch: refs/heads/trunk
Commit: b00d45e544ac96727da0bc57209826dadd5d581c
Parents: 6fa376a
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Apr 16 18:20:23 2014 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Apr 16 18:21:18 2014 +0300

----------------------------------------------------------------------
 .../libraries/script/hook.py                    |   6 +-
 .../1.3.2/hooks/after-INSTALL/scripts/hook.py   |  36 +++
 .../1.3.2/hooks/after-INSTALL/scripts/params.py |  61 +++++
 .../scripts/shared_initialization.py            |  45 ++++
 .../after-INSTALL/templates/hadoop-env.sh.j2    | 139 ++++++++++++
 .../1.3.2/hooks/before-INSTALL/scripts/hook.py  |   1 +
 .../hooks/before-INSTALL/scripts/params.py      |  43 ++++
 .../scripts/shared_initialization.py            |  43 ++++
 .../1.3.2/hooks/before-RESTART/scripts/hook.py  |   2 +-
 .../1.3.2/hooks/before-START/scripts/hook.py    |   3 +-
 .../1.3.2/hooks/before-START/scripts/params.py  |   9 -
 .../scripts/shared_initialization.py            | 154 +------------
 .../before-START/templates/hadoop-env.sh.j2     | 139 ------------
 .../hooks/before-START/templates/hdfs.conf.j2   |  35 ---
 .../hooks/before-START/templates/slaves.j2      |  21 --
 .../templates/taskcontroller.cfg.j2             |  38 ----
 .../services/HDFS/package/scripts/datanode.py   |   3 +
 .../1.3.2/services/HDFS/package/scripts/hdfs.py |  63 ++++++
 .../HDFS/package/scripts/hdfs_client.py         |  16 +-
 .../services/HDFS/package/scripts/namenode.py   |   2 +
 .../services/HDFS/package/scripts/params.py     |   4 +-
 .../services/HDFS/package/scripts/snamenode.py  |   3 +-
 .../HDFS/package/templates/hdfs.conf.j2         |  35 +++
 .../services/HDFS/package/templates/slaves.j2   |  21 ++
 .../MAPREDUCE/package/scripts/mapreduce.py      |  77 +++++++
 .../MAPREDUCE/package/scripts/params.py         |   8 +-
 .../package/templates/taskcontroller.cfg.j2     |  38 ++++
 .../2.0.6/hooks/before-INSTALL/scripts/hook.py  |   1 +
 .../hooks/before-INSTALL/scripts/params.py      |  12 +
 .../scripts/shared_initialization.py            |  43 ++++
 .../2.0.6/hooks/before-RESTART/scripts/hook.py  |   2 +-
 .../2.0.6/hooks/before-START/scripts/hook.py    |   1 -
 .../2.0.6/hooks/before-START/scripts/params.py  |  12 -
 .../scripts/shared_initialization.py            | 145 +-----------
 .../hooks/before-START/templates/hdfs.conf.j2   |  35 ---
 .../hooks/before-START/templates/slaves.j2      |  21 --
 .../templates/taskcontroller.cfg.j2             |  38 ----
 .../services/HDFS/package/scripts/datanode.py   |   4 +-
 .../2.0.6/services/HDFS/package/scripts/hdfs.py |  63 ++++++
 .../HDFS/package/scripts/hdfs_client.py         |  17 +-
 .../HDFS/package/scripts/journalnode.py         |   3 +
 .../services/HDFS/package/scripts/namenode.py   |   2 +
 .../services/HDFS/package/scripts/params.py     |   2 +
 .../services/HDFS/package/scripts/snamenode.py  |   3 +-
 .../services/HDFS/package/scripts/zkfc_slave.py |   2 +
 .../HDFS/package/templates/hdfs.conf.j2         |  35 +++
 .../services/HDFS/package/templates/slaves.j2   |  21 ++
 .../services/YARN/package/scripts/params.py     |   9 +
 .../2.0.6/services/YARN/package/scripts/yarn.py |  77 +++++++
 .../package/templates/taskcontroller.cfg.j2     |  38 ++++
 .../python/stacks/1.3.2/HDFS/test_datanode.py   |  38 +++-
 .../python/stacks/1.3.2/HDFS/test_namenode.py   |  38 ++++
 .../python/stacks/1.3.2/HDFS/test_snamenode.py  |  35 +++
 .../1.3.2/MAPREDUCE/test_mapreduce_client.py    |  28 ++-
 .../MAPREDUCE/test_mapreduce_historyserver.py   |  27 +++
 .../MAPREDUCE/test_mapreduce_jobtracker.py      |  65 +++++-
 .../MAPREDUCE/test_mapreduce_tasktracker.py     |  31 ++-
 .../hooks/after-INSTALL/test_after_install.py   |  48 ++++
 .../hooks/before-INSTALL/test_before_install.py |  89 +++++---
 .../hooks/before-START/test_before_start.py     | 223 +++----------------
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  36 ++-
 .../stacks/2.0.6/HDFS/test_journalnode.py       |  33 ++-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  36 ++-
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |  34 ++-
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  36 ++-
 .../2.0.6/HIVE/_test_hive_service_check.py      |  98 ++++++++
 .../2.0.6/HIVE/test_hive_service_check.py       |  98 --------
 .../stacks/2.0.6/YARN/test_historyserver.py     |  39 ++++
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |  39 ++++
 .../stacks/2.0.6/YARN/test_nodemanager.py       |  40 +++-
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  41 +++-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |  55 +++++
 .../hooks/before-INSTALL/test_before_install.py |  13 ++
 .../hooks/before-START/test_before_start.py     | 138 +-----------
 .../stacks/2.1/YARN/test_apptimelineserver.py   |  16 ++
 75 files changed, 1860 insertions(+), 1145 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py b/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
index 19b5204..3133bab 100644
--- a/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
+++ b/ambari-agent/src/main/python/resource_management/libraries/script/hook.py
@@ -46,9 +46,11 @@ class Hook(Script):
     """
     args = sys.argv
     #Hook script to run
-    args[0] = args[0].replace(args[1], command)
+    args[0] = args[0].replace('before-'+args[1], command)
+    args[0] = args[0].replace('after-'+args[1], command)
     #Hook script base directory
-    args[3] = args[3].replace(args[1], command)
+    args[3] = args[3].replace('before-'+args[1], command)
+    args[3] = args[3].replace('after-'+args[1], command)
 
 
     cmd = [sys.executable]

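The hunk above changes how a hook locates the script of another hook it is asked to run: instead of replacing the bare command name inside the current argv, it now replaces the fully prefixed directory name. A rough sketch of the substitution with hypothetical argv values (args[0] = hook script path, args[1] = current command, args[3] = hooks base directory, as implied by the indices in the diff):

  command = "after-INSTALL"
  args = [
      "/var/lib/ambari-agent/cache/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py",
      "START",
      "/var/lib/ambari-agent/data/command.json",
      "/var/lib/ambari-agent/cache/stacks/HDP/1.3.2/hooks/before-START",
  ]

  # Replacing 'before-START'/'after-START' rather than the bare 'START'
  # maps the current hook's paths onto the hook being invoked.
  args[0] = args[0].replace('before-' + args[1], command)
  args[0] = args[0].replace('after-' + args[1], command)
  # args[0] is now .../hooks/after-INSTALL/scripts/hook.py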
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..25a56ad
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/hook.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+
+# Hook for hosts with only clients and no other components
+class AfterInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    env.set_params(params)
+    setup_hadoop_env()
+    setup_config()
+
+if __name__ == "__main__":
+  AfterInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
new file mode 100644
index 0000000..48b8701
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/params.py
@@ -0,0 +1,61 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+import os
+
+config = Script.get_config()
+
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
+
+#hadoop-env.sh
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['global']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['global']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['global']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m")
+jtnode_heapsize =  default("jtnode_heapsize","1024m")
+ttnode_heapsize = default("ttnode_heapsize","1024m")
+
+dtnode_heapsize = config['configurations']['global']['dtnode_heapsize']
+
+mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+
+#users and groups
+hdfs_user = config['configurations']['global']['hdfs_user']
+user_group = config['configurations']['global']['user_group']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
new file mode 100644
index 0000000..2a26e2e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -0,0 +1,45 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+from resource_management import *
+
+def setup_hadoop_env():
+  import params
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=Template('hadoop-env.sh.j2')
+  )
+
+def setup_config():
+  import params
+  XmlConfig("core-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['core-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
new file mode 100644
index 0000000..76ac3f3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/after-INSTALL/templates/hadoop-env.sh.j2
@@ -0,0 +1,139 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements.  See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership.  The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License.  You may obtain a copy of the License at
+# *
+# *     http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME.  All others are
+# optional.  When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+# The java implementation to use.  Required.
+export JAVA_HOME={{java_home}}
+export HADOOP_HOME_WARN_SUPPRESS=1
+
+# Hadoop Configuration Directory
+#TODO: if env var set that can cause problems
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
+
+# this is different for HDP1 #
+# Path to jsvc required by secure HDP 2.0 datanode
+# export JSVC_HOME={{jsvc_path}}
+
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+
+export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"
+
+# Extra Java runtime options.  Empty by default.
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
+
+# History server logs
+export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
+export HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA -Dmapred.log.dir=$HADOOP_MAPRED_LOG_DIR ${HADOOP_JOBTRACKER_OPTS}"
+
+HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
+HADOOP_DATANODE_OPTS="-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
+HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
+
+export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+# On secure datanodes, user to run the datanode as after dropping privileges
+export HADOOP_SECURE_DN_USER={{hdfs_user}}
+
+# Extra ssh options.  Empty by default.
+export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
+
+# Where log files are stored.  $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER
+
+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from.  Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands.  Unset by default.  This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
+export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+# History server pid
+export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER
+
+YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes.  See 'man nice'.
+
+# export HADOOP_NICENESS=10
+
+# Use libraries from standard classpath
+JAVA_JDBC_LIBS=""
+#Add libraries required by mysql connector
+for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by oracle connector
+for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
+do
+  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+done
+#Add libraries required by nodemanager
+MAPREDUCE_LIBS={{mapreduce_libs_path}}
+export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}
+
+# Setting path to hdfs command line
+export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
+
+#Mostly required for hadoop 2.0
+export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64

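The hadoop-env.sh.j2 template above is rendered via File(..., content=Template('hadoop-env.sh.j2')) with the values defined in params.py. A minimal sketch of how the {{...}} placeholders get filled, assuming plain Jinja2 semantics (the real rendering happens inside resource_management's Template, which resolves names from the params scope):

  from jinja2 import Template as J2Template

  snippet = 'export JAVA_HOME={{java_home}}\nexport HADOOP_HEAPSIZE="{{hadoop_heapsize}}"'
  # Hypothetical values; in the hooks these come from params.py.
  print(J2Template(snippet).render(java_home="/usr/jdk64/jdk1.7.0_45",
                                   hadoop_heapsize="1024"))
  # export JAVA_HOME=/usr/jdk64/jdk1.7.0_45
  # export HADOOP_HEAPSIZE="1024"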
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
index 51e5cd2..03859e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/hook.py
@@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
+    setup_java()
     setup_users()
     install_packages()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
index fa19ca3..f2a4199 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
@@ -23,6 +23,49 @@ import os
 
 config = Script.get_config()
 
+#java params
+artifact_dir = "/tmp/HDP-artifacts/"
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']
+java_home = config['hostLevelParams']['java_home']
+if System.get_instance().os_family == "suse":
+  jsvc_path = "/usr/lib/bigtop-utils"
+else:
+  jsvc_path = "/usr/libexec/bigtop-utils"
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+#hadoop params
+hadoop_conf_dir = "/etc/hadoop/conf"
+
+#hadoop-env.sh
+
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['global']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['global']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['global']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("jtnode_opt_maxnewsize","200m")
+jtnode_heapsize =  default("jtnode_heapsize","1024m")
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['global']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
+
 #users and groups
 yarn_user = config['configurations']['global']['yarn_user']
 hbase_user = config['configurations']['global']['hbase_user']

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
index 6a37d02..98b5dea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -101,6 +101,49 @@ def set_uid(user, user_dirs):
   Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
+def setup_java():
+  """
+  Installs the JDK using parameters that come from ambari-server.
+  """
+  import params
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  if not params.jdk_name:
+    return
+
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"))
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+  Execute( download_jce,
+           path = ["/bin","/usr/bin/"],
+           not_if =format("test -e {jce_curl_target}"),
+           ignore_failures = True
+  )
+
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd  = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
 def install_packages():
   packages = {"redhat": ["net-snmp-utils", "net-snmp"],
               "suse": ["net-snmp"],

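A note on the setup_java() pattern above: each Execute carries a not_if guard, so re-running the hook on a host that already has the JDK is a no-op. A standalone sketch of those guard semantics, in plain Python rather than the resource_management DSL (the JDK path below is illustrative):

import subprocess

def execute(cmd, not_if=None):
    # Mirrors Execute(..., not_if=...): skip cmd when the guard succeeds.
    if not_if is not None and subprocess.call(not_if, shell=True) == 0:
        return "skipped"
    subprocess.check_call(cmd, shell=True)
    return "ran"

java_exec = "/usr/jdk64/jdk1.6.0_31/bin/java"  # illustrative path
print(execute("echo would download and unpack the JDK here",
              not_if="test -e %s" % java_exec))
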
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
index 7042602..0596106 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-RESTART/scripts/hook.py
@@ -24,7 +24,7 @@ from resource_management import *
 class BeforeConfigureHook(Hook):
 
   def hook(self, env):
-    self.run_custom_hook('START')
+    self.run_custom_hook('before-START')
 
 if __name__ == "__main__":
   BeforeConfigureHook().execute()

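The one-line fix above implies that custom hooks are resolved by hook directory name (before-INSTALL, before-START, and so on), so the bare 'START' never matched a folder. A toy illustration of that assumed lookup rule (the cache path is made up):

import os

HOOKS_DIR = "/var/lib/ambari-agent/cache/hooks"  # assumed location

def resolve_hook(name):
    # Assumption: a hook named X lives at <HOOKS_DIR>/X/scripts/hook.py
    path = os.path.join(HOOKS_DIR, name, "scripts", "hook.py")
    return path if os.path.exists(path) else None

print(resolve_hook("before-START"))  # resolves on an agent host
print(resolve_hook("START"))         # no such directory -> None
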
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
index 075a6b6..31f70bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/hook.py
@@ -29,8 +29,9 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
-    setup_java()
+    self.run_custom_hook('after-INSTALL')
     setup_hadoop()
+    setup_database()
     setup_configs()
     create_javahome_symlink()
     init_services()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
index 2db9b4b..61e04f8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
@@ -23,23 +23,14 @@ import os
 
 config = Script.get_config()
 
-#java params
-artifact_dir = "/tmp/HDP-artifacts/"
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
 #security params
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
 
 #users and groups
-mapred_user = config['configurations']['global']['mapred_user']
 hdfs_user = config['configurations']['global']['hdfs_user']
-yarn_user = config['configurations']['global']['yarn_user']
 
 user_group = config['configurations']['global']['user_group']
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #snmp
 snmp_conf_dir = "/etc/snmp/"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
index c6cc432..35f22ba 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/shared_initialization.py
@@ -21,49 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_java():
-  """
-  Installs jdk using specific params, that comes from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-  
-  if not params.jdk_name:
-    return
-  
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"))
-
-  if params.jdk_name.endswith(".bin"):
-    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
-  elif params.jdk_name.endswith(".gz"):
-    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
-  
-  Execute(install_cmd,
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}")
-  )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-        path = ["/bin","/usr/bin/"],
-        not_if =format("test -e {jce_curl_target}"),
-        ignore_failures = True
-  )
-  
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
-    Execute(extract_cmd,
-          only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-          cwd  = security_dir,
-          path = ['/bin/','/usr/bin']
-    )
-
 def setup_hadoop():
   """
   Setup hadoop files and directories
@@ -77,11 +34,6 @@ def setup_hadoop():
   install_snappy()
 
   #directories
-  Directory(params.hadoop_conf_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
   Directory(params.hdfs_log_dir_prefix,
             recursive=True,
             owner='root',
@@ -94,40 +46,14 @@ def setup_hadoop():
   )
 
   #files
-  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hdfs.conf.j2")
-  )
   if params.security_enabled:
-    File(os.path.join(params.hadoop_bin, "task-controller"),
-         owner="root",
-         group=params.mapred_tt_group,
-         mode=06050
-    )
-    tc_mode = 0644
     tc_owner = "root"
   else:
-    tc_mode = None
     tc_owner = params.hdfs_user
 
-  if tc_mode:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner = tc_owner,
-         mode = tc_mode,
-         group = params.mapred_tt_group,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+    File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
          owner=tc_owner,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']:
-    File(os.path.join(params.hadoop_conf_dir, file),
-         owner=tc_owner,
-         content=Template(file + ".j2")
+         content=Template("commons-logging.properties.j2")
     )
 
   health_check_template = "health_check" #for stack 1 use 'health_check'
@@ -156,6 +82,11 @@ def setup_hadoop():
        content=Template("hadoop-metrics2.properties.j2")
   )
 
+def setup_database():
+  """
+  Downloads the DB driver when an external database (e.g. Oracle) is configured.
+  """
+  import params
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
@@ -176,66 +107,17 @@ def setup_configs():
   """
   import params
 
-  if "mapred-queue-acls" in params.config['configurations']:
-    XmlConfig("mapred-queue-acls.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'mapred-queue-acls'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-  elif os.path.exists(
-      os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
-    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
   XmlConfig("core-site.xml",
             conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
             owner=params.hdfs_user,
             group=params.user_group
   )
-
-  if "mapred-site" in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-
   File(params.task_log4j_properties_location,
        content=StaticFile("task-log4j.properties"),
        mode=0755
   )
 
-  if "capacity-scheduler" in params.config['configurations']:
-    XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'capacity-scheduler'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
-
-  # if params.stack_version[0] == "1":
   Link('/usr/lib/hadoop/lib/hadoop-tools.jar',
        to = '/usr/lib/hadoop/hadoop-tools.jar'
   )
@@ -245,26 +127,10 @@ def setup_configs():
          owner=params.hdfs_user,
          group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
+
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
     File(os.path.join(params.hadoop_conf_dir, 'masters'),
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
-         owner=params.mapred_user,
+         owner=params.hdfs_user,
          group=params.user_group
     )
 
@@ -312,4 +178,4 @@ def init_services():
   # enable snmpd
   Execute( "service snmpd start; chkconfig snmpd on",
     path = "/usr/local/bin/:/bin/:/sbin/"
-  )  
\ No newline at end of file
+  )  

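One detail the slimmed-down setup_hadoop() above keeps intact is the ownership rule for the remaining config files; spelled out as a one-liner (names follow the params in this patch):

def conf_file_owner(security_enabled, hdfs_user="hdfs"):
    # Secured clusters keep config files root-owned; otherwise the
    # hdfs service user owns them.
    return "root" if security_enabled else hdfs_user

print(conf_file_owner(True))   # -> root
print(conf_file_owner(False))  # -> hdfs
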
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
deleted file mode 100644
index 76ac3f3..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hadoop-env.sh.j2
+++ /dev/null
@@ -1,139 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-
-# Set Hadoop-specific environment variables here.
-
-# The only required environment variable is JAVA_HOME.  All others are
-# optional.  When running a distributed configuration it is best to
-# set JAVA_HOME in this file, so that it is correctly defined on
-# remote nodes.
-
-# The java implementation to use.  Required.
-export JAVA_HOME={{java_home}}
-export HADOOP_HOME_WARN_SUPPRESS=1
-
-# Hadoop Configuration Directory
-#TODO: if env var set that can cause problems
-export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}
-
-# this is different for HDP1 #
-# Path to jsvc required by secure HDP 2.0 datanode
-# export JSVC_HOME={{jsvc_path}}
-
-
-# The maximum amount of heap to use, in MB. Default is 1000.
-export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
-
-export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"
-
-# Extra Java runtime options.  Empty by default.
-export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
-
-# History server logs
-export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER
-
-# Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
-export HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA -Dmapred.log.dir=$HADOOP_MAPRED_LOG_DIR ${HADOOP_JOBTRACKER_OPTS}"
-
-HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
-HADOOP_DATANODE_OPTS="-Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
-HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
-
-export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"
-
-# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
-# On secure datanodes, user to run the datanode as after dropping privileges
-export HADOOP_SECURE_DN_USER={{hdfs_user}}
-
-# Extra ssh options.  Empty by default.
-export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
-
-# Where log files are stored.  $HADOOP_HOME/logs by default.
-export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER
-
-# Where log files are stored in the secure data environment.
-export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
-
-# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
-# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
-
-# host:path where hadoop code should be rsync'd from.  Unset by default.
-# export HADOOP_MASTER=master:/home/$USER/src/hadoop
-
-# Seconds to sleep between slave commands.  Unset by default.  This
-# can be useful in large clusters, where, e.g., slave rsyncs can
-# otherwise arrive faster than the master can service them.
-# export HADOOP_SLAVE_SLEEP=0.1
-
-# The directory where pid files are stored. /tmp by default.
-export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
-export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER
-
-# History server pid
-export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER
-
-YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"
-
-# A string representing this instance of hadoop. $USER by default.
-export HADOOP_IDENT_STRING=$USER
-
-# The scheduling priority for daemon processes.  See 'man nice'.
-
-# export HADOOP_NICENESS=10
-
-# Use libraries from standard classpath
-JAVA_JDBC_LIBS=""
-#Add libraries required by mysql connector
-for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
-do
-  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
-done
-#Add libraries required by oracle connector
-for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
-do
-  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
-done
-#Add libraries required by nodemanager
-MAPREDUCE_LIBS={{mapreduce_libs_path}}
-export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}
-
-# Setting path to hdfs command line
-export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
-
-#Mostly required for hadoop 2.0
-export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
deleted file mode 100644
index d58a6f5..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/hdfs.conf.j2
+++ /dev/null
@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{{hdfs_user}}   - nofile 32768
-{{hdfs_user}}   - nproc  65536

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
deleted file mode 100644
index 4a9e713..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/slaves.j2
+++ /dev/null
@@ -1,21 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in slave_hosts %}
-{{host}}
-{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
deleted file mode 100644
index 3d5f4f2..0000000
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/templates/taskcontroller.cfg.j2
+++ /dev/null
@@ -1,38 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#/*
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *     http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing, software
-# * distributed under the License is distributed on an "AS IS" BASIS,
-# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# * See the License for the specific language governing permissions and
-# * limitations under the License.
-# */
-mapred.local.dir={{mapred_local_dir}}
-mapreduce.tasktracker.group={{mapred_tt_group}}
-hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
index 57fdb35..379594d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/datanode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_datanode import datanode
+from hdfs import hdfs
 
 
 class DataNode(Script):
@@ -44,6 +45,8 @@ class DataNode(Script):
   def configure(self, env):
     import params
 
+    env.set_params(params)
+    hdfs()
     datanode(action="configure")
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
new file mode 100644
index 0000000..06bf583
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def hdfs(name=None):
+  import params
+
+  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hdfs.conf.j2")
+  )
+
+  if params.security_enabled:
+    tc_mode = 0644
+    tc_owner = "root"
+  else:
+    tc_mode = None
+    tc_owner = params.hdfs_user
+
+  if "hadoop-policy" in params.config['configurations']:
+    XmlConfig("hadoop-policy.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['hadoop-policy'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  XmlConfig("hdfs-site.xml",
+            conf_dir=params.hadoop_conf_dir,
+            configurations=params.config['configurations']['hdfs-site'],
+            owner=params.hdfs_user,
+            group=params.user_group
+  )
+
+  File(os.path.join(params.hadoop_conf_dir, 'slaves'),
+       owner=tc_owner,
+       content=Template("slaves.j2")
+  )
\ No newline at end of file

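The new hdfs() above is the shared resource this refactoring is named for: the limits file, hdfs-site.xml, and the slaves file are declared once, and each HDFS component script calls the function from configure() (see the datanode, namenode, and snamenode hunks nearby). A standalone sketch of that shape in plain Python, with made-up resource tuples standing in for the DSL:

def hdfs(resources, security_enabled=False):
    # Shared HDFS resources, declared once for all components.
    resources.append(("file", "/etc/security/limits.d/hdfs.conf", "root"))
    resources.append(("xml", "/etc/hadoop/conf/hdfs-site.xml", "hdfs"))
    slaves_owner = "root" if security_enabled else "hdfs"
    resources.append(("template", "/etc/hadoop/conf/slaves", slaves_owner))

def configure_datanode():
    resources = []
    hdfs(resources)  # shared part first, then component-specific bits
    resources.append(("directory", "/hadoop/hdfs/data", "hdfs"))  # illustrative
    return resources

print(configure_datanode())
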
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
index 40bde89..3adc40d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_client.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from hdfs import hdfs
 from utils import service
 
 
@@ -44,20 +45,7 @@ class HdfsClient(Script):
 
   def configure(self, env):
     import params
-
-    XmlConfig("core-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['core-site'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-    
-    XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-    )
+    hdfs()
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
index 2f26c98..759832c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/namenode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_namenode import namenode
+from hdfs import hdfs
 
 
 class NameNode(Script):
@@ -45,6 +46,7 @@ class NameNode(Script):
     import params
 
     env.set_params(params)
+    hdfs()
     namenode(action="configure")
     pass
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index 98d536c..9307910 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -143,6 +143,4 @@ HdfsDirectory = functools.partial(
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
 )
-
-
-
+limits_conf_dir = "/etc/security/limits.d"
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py
index b2a3bd1..62fe2bc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/snamenode.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from resource_management import *
 from hdfs_snamenode import snamenode
+from hdfs import hdfs
 
 
 class SNameNode(Script):
@@ -49,7 +50,7 @@ class SNameNode(Script):
     import params
 
     env.set_params(params)
-
+    hdfs()
     snamenode(action="configure")
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2
new file mode 100644
index 0000000..d58a6f5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/hdfs.conf.j2
@@ -0,0 +1,35 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{{hdfs_user}}   - nofile 32768
+{{hdfs_user}}   - nproc  65536

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2
new file mode 100644
index 0000000..4a9e713
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/templates/slaves.j2
@@ -0,0 +1,21 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+{% for host in slave_hosts %}
+{{host}}
+{% endfor %}

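To preview what the two templates above render to, the same substitution can be reproduced with the jinja2 package (the .j2 suffix reflects that Ambari's Template resource is Jinja2-based; the variable values below are examples):

from jinja2 import Template

limits = Template("{{hdfs_user}}   - nofile 32768\n"
                  "{{hdfs_user}}   - nproc  65536\n")
print(limits.render(hdfs_user="hdfs"))

slaves = Template("{% for host in slave_hosts %}{{host}}\n{% endfor %}")
print(slaves.render(slave_hosts=["dn1.example.com", "dn2.example.com"]))
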
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
index 4b26814..a957d78 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
@@ -97,3 +97,80 @@ def mapreduce(name=None):
             owner=params.mapred_user,
             group=params.user_group,
   )
+
+  if params.security_enabled:
+    File(os.path.join(params.hadoop_bin, "task-controller"),
+         owner="root",
+         group=params.mapred_tt_group,
+         mode=06050
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner="root",
+         mode=0644,
+         group=params.mapred_tt_group,
+         content=Template("taskcontroller.cfg.j2")
+    )
+  else:
+    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+         owner=params.hdfs_user,
+         content=Template("taskcontroller.cfg.j2")
+    )
+
+  if "capacity-scheduler" in params.config['configurations']:
+    XmlConfig("capacity-scheduler.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'capacity-scheduler'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
+
+  if "mapred-queue-acls" in params.config['configurations']:
+    XmlConfig("mapred-queue-acls.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations'][
+                'mapred-queue-acls'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+  elif os.path.exists(
+    os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
+    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if "mapred-site" in params.config['configurations']:
+    XmlConfig("mapred-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['mapred-site'],
+              owner=params.mapred_user,
+              group=params.user_group
+    )
+
+  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
+    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )
+
+  if os.path.exists(
+    os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
+    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+         owner=params.mapred_user,
+         group=params.user_group
+    )

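The mapred-queue-acls handling above follows a write-or-adopt pattern: when Ambari holds desired state for the file it is rendered fresh; when only a pre-existing file is found on disk, the content is left alone and ownership is corrected. A minimal standalone approximation (chown is stood in for by chmod, and the XML rendering is simplified):

import os

def render_xml(props):
    rows = "".join("<property><name>%s</name><value>%s</value></property>"
                   % item for item in sorted(props.items()))
    return "<configuration>%s</configuration>" % rows

def ensure_queue_acls(configurations, path="mapred-queue-acls.xml"):
    if "mapred-queue-acls" in configurations:
        # Desired state exists: (re)write the file from it.
        with open(path, "w") as f:
            f.write(render_xml(configurations["mapred-queue-acls"]))
    elif os.path.exists(path):
        # No desired state, but a file is present: keep its content,
        # only fix up permissions/ownership.
        os.chmod(path, 0o644)

ensure_queue_acls({"mapred-queue-acls": {"mapred.queue.default.acl-submit-job": "*"}})
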
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
index 1a77f95..f6f4367 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
@@ -61,9 +61,9 @@ mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapred.
 #for create_hdfs_directory
 hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_pid_dir_prefix = config['configurations']['global']['hadoop_pid_dir_prefix']
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code
@@ -74,4 +74,8 @@ HdfsDirectory = functools.partial(
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local
-)
\ No newline at end of file
+)
+
+mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
+
+slave_hosts = default("/clusterHostInfo/slave_hosts", [])

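The default() calls above take a /-separated path into the command JSON plus a fallback used when the key is absent; an approximation of that lookup (the real helper reads the global config itself instead of taking it as an argument):

def default(config, path, fallback):
    node = config
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

config = {"configurations": {"mapred-site": {}}}
print(default(config,
              "/configurations/mapred-site/mapreduce.tasktracker.group",
              "hadoop"))  # key absent -> falls back (here, to user_group)
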
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2
new file mode 100644
index 0000000..3d5f4f2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/templates/taskcontroller.cfg.j2
@@ -0,0 +1,38 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements.  See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership.  The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License.  You may obtain a copy of the License at
+# *
+# *     http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+mapred.local.dir={{mapred_local_dir}}
+mapreduce.tasktracker.group={{mapred_tt_group}}
+hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
index 51e5cd2..03859e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/hook.py
@@ -29,6 +29,7 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
+    setup_java()
     setup_users()
     install_packages()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index 341d86f..1f8bfa8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -87,3 +87,15 @@ if has_ganglia_server:
   ganglia_server_host = ganglia_server_hosts[0]
 
 hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+#security params
+_authentication = config['configurations']['core-site']['hadoop.security.authentication']
+security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
+
+#java params
+java_home = config['hostLevelParams']['java_home']
+artifact_dir = "/tmp/HDP-artifacts/"
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']

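The security block above boils down to "kerberos means secured". Spelled out with a simplified stand-in for is_empty() (the real helper also treats Ambari's empty-marker values as empty):

def is_empty(value):  # simplified stand-in
    return value is None or value == ""

for auth in ("kerberos", "simple", ""):
    security_enabled = (not is_empty(auth) and auth == 'kerberos')
    print("%s -> %s" % (auth or "<empty>", security_enabled))
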
http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
index c7c0c70..0ae8b19 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -119,6 +119,49 @@ def set_uid(user, user_dirs):
   Execute(format("/tmp/changeUid.sh {user} {user_dirs} 2>/dev/null"),
           not_if = format("test $(id -u {user}) -gt 1000"))
 
+def setup_java():
+  """
+  Installs the JDK using parameters that come from ambari-server.
+  """
+  import params
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  if not params.jdk_name:
+    return
+
+  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"))
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+  Execute( download_jce,
+           path = ["/bin","/usr/bin/"],
+           not_if =format("test -e {jce_curl_target}"),
+           ignore_failures = True
+  )
+
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd  = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
 def install_packages():
   packages = {"redhat": ["net-snmp-utils", "net-snmp"],
               "suse": ["net-snmp"],

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py
index 7042602..0596106 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-RESTART/scripts/hook.py
@@ -24,7 +24,7 @@ from resource_management import *
 class BeforeConfigureHook(Hook):
 
   def hook(self, env):
-    self.run_custom_hook('START')
+    self.run_custom_hook('before-START')
 
 if __name__ == "__main__":
   BeforeConfigureHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
index 075a6b6..979c628 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/hook.py
@@ -29,7 +29,6 @@ class BeforeConfigureHook(Hook):
     import params
 
     env.set_params(params)
-    setup_java()
     setup_hadoop()
     setup_configs()
     create_javahome_symlink()

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index f1ef84a..8b342f7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -23,12 +23,6 @@ import os
 
 config = Script.get_config()
 
-#java params
-artifact_dir = "/tmp/HDP-artifacts/"
-jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
-jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
-jce_location = config['hostLevelParams']['jdk_location']
-jdk_location = config['hostLevelParams']['jdk_location']
 #security params
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']
 security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
@@ -39,7 +33,6 @@ hdfs_user = config['configurations']['global']['hdfs_user']
 yarn_user = config['configurations']['global']['yarn_user']
 
 user_group = config['configurations']['global']['user_group']
-mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)
 
 #snmp
 snmp_conf_dir = "/etc/snmp/"
@@ -88,7 +81,6 @@ hadoop_home = "/usr"
 hadoop_bin = "/usr/lib/hadoop/sbin"
 
 task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
-limits_conf_dir = "/etc/security/limits.d"
 
 hdfs_log_dir_prefix = config['configurations']['global']['hdfs_log_dir_prefix']
 hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
@@ -138,10 +130,6 @@ mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 mapred_log_dir_prefix = default("mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
 
-#taskcontroller.cfg
-
-mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
-
 #log4j.properties
 
 yarn_log_dir_prefix = default("yarn_log_dir_prefix","/var/log/hadoop-yarn")

http://git-wip-us.apache.org/repos/asf/ambari/blob/b00d45e5/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index f132c2e..6e34a95 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -21,49 +21,6 @@ import os
 
 from resource_management import *
 
-def setup_java():
-  """
-  Installs jdk using specific params, that comes from ambari-server
-  """
-  import params
-
-  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
-  java_dir = os.path.dirname(params.java_home)
-  java_exec = format("{java_home}/bin/java")
-  
-  if not params.jdk_name:
-    return
-  
-  Execute(format("mkdir -p {artifact_dir} ; curl -kf --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}"))
-
-  if params.jdk_name.endswith(".bin"):
-    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
-  elif params.jdk_name.endswith(".gz"):
-    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
-  
-  Execute(install_cmd,
-          path = ["/bin","/usr/bin/"],
-          not_if = format("test -e {java_exec}")
-  )
-  jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-  download_jce = format("mkdir -p {artifact_dir}; curl -kf --retry 10 {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
-  Execute( download_jce,
-        path = ["/bin","/usr/bin/"],
-        not_if =format("test -e {jce_curl_target}"),
-        ignore_failures = True
-  )
-  
-  if params.security_enabled:
-    security_dir = format("{java_home}/jre/lib/security")
-    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
-    Execute(extract_cmd,
-          only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-          cwd  = security_dir,
-          path = ['/bin/','/usr/bin']
-    )
-
 def setup_hadoop():
   """
   Setup hadoop files and directories
@@ -98,37 +55,12 @@ def setup_hadoop():
             owner=params.hdfs_user,
             )
   #files
-  File(os.path.join(params.limits_conf_dir, 'hdfs.conf'),
-       owner='root',
-       group='root',
-       mode=0644,
-       content=Template("hdfs.conf.j2")
-  )
   if params.security_enabled:
-    File(os.path.join(params.hadoop_bin, "task-controller"),
-         owner="root",
-         group=params.mapred_tt_group,
-         mode=06050
-    )
-    tc_mode = 0644
     tc_owner = "root"
   else:
-    tc_mode = None
     tc_owner = params.hdfs_user
 
-  if tc_mode:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner = tc_owner,
-         mode = tc_mode,
-         group = params.mapred_tt_group,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  else:
-    File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
-         owner=tc_owner,
-         content=Template("taskcontroller.cfg.j2")
-    )
-  for file in ['hadoop-env.sh', 'commons-logging.properties', 'slaves']:
+  for file in ['hadoop-env.sh', 'commons-logging.properties']:
     File(os.path.join(params.hadoop_conf_dir, file),
          owner=tc_owner,
          content=Template(file + ".j2")
@@ -160,6 +92,11 @@ def setup_hadoop():
        content=Template("hadoop-metrics2.properties.j2")
   )
 
+def setup_database():
+  """
+  Downloads the JDBC driver for the server database (e.g. Oracle) when one is configured.
+  """
+  import params
   db_driver_dload_cmd = ""
   if params.server_db_name == 'oracle' and params.oracle_driver_url != "":
     db_driver_dload_cmd = format(
@@ -180,29 +117,6 @@ def setup_configs():
   """
   import params
 
-  if "mapred-queue-acls" in params.config['configurations']:
-    XmlConfig("mapred-queue-acls.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'mapred-queue-acls'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-  elif os.path.exists(
-      os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml")):
-    File(os.path.join(params.hadoop_conf_dir, "mapred-queue-acls.xml"),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-
-  if "hadoop-policy" in params.config['configurations']:
-    XmlConfig("hadoop-policy.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['hadoop-policy'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
   XmlConfig("core-site.xml",
             conf_dir=params.hadoop_conf_dir,
             configurations=params.config['configurations']['core-site'],
@@ -210,68 +124,21 @@ def setup_configs():
             group=params.user_group
   )
 
-  if "mapred-site" in params.config['configurations']:
-    XmlConfig("mapred-site.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations']['mapred-site'],
-              owner=params.mapred_user,
-              group=params.user_group
-    )
-
   File(params.task_log4j_properties_location,
        content=StaticFile("task-log4j.properties"),
        mode=0755
   )
 
-  if "capacity-scheduler" in params.config['configurations']:
-    XmlConfig("capacity-scheduler.xml",
-              conf_dir=params.hadoop_conf_dir,
-              configurations=params.config['configurations'][
-                'capacity-scheduler'],
-              owner=params.hdfs_user,
-              group=params.user_group
-    )
-
-  XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-  )
-
-  # if params.stack_version[0] == "1":
-  #   Link('/usr/lib/hadoop/hadoop-tools.jar',
-  #         to = '/usr/lib/hadoop/lib/hadoop-tools.jar',
-  #         mode = 0755
-  #   )
-
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'configuration.xsl')):
     File(os.path.join(params.hadoop_conf_dir, 'configuration.xsl'),
          owner=params.hdfs_user,
          group=params.user_group
     )
-  if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
-    File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
   if os.path.exists(os.path.join(params.hadoop_conf_dir, 'masters')):
     File(os.path.join(params.hadoop_conf_dir, 'masters'),
               owner=params.hdfs_user,
               group=params.user_group
     )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
-  if os.path.exists(
-      os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
-    File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
-         owner=params.mapred_user,
-         group=params.user_group
-    )
 
   generate_include_file()
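
Taken together, the shared_initialization.py hunks pull JDK installation out of the shared before-START code and split the driver download into its own setup_database() step, leaving setup_hadoop() and setup_configs() free of MapReduce-specific files. A hypothetical sketch of how a before-START hook entry point could call the split-out functions; the class name and exact call order are assumptions for illustration, not lines from this patch:

# Hypothetical before-START hook entry point after this refactoring. The class
# name and the exact call order are illustrative assumptions, not this patch.
from resource_management import *
from shared_initialization import setup_hadoop, setup_database, setup_configs

class BeforeStartHook(Hook):
  def hook(self, env):
    import params
    env.set_params(params)
    setup_hadoop()    # shared dirs and files (JDK logic no longer lives here)
    setup_database()  # DB driver download, now factored out of setup_hadoop()
    setup_configs()   # core-site.xml plus the remaining shared conf files

if __name__ == "__main__":
  BeforeStartHook().execute()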